]> git.lizzy.rs Git - rust.git/commitdiff
Auto merge of #34723 - GuillaumeGomez:slice_doc, r=apasel422
authorbors <bors@rust-lang.org>
Sun, 10 Jul 2016 01:16:48 +0000 (18:16 -0700)
committerGitHub <noreply@github.com>
Sun, 10 Jul 2016 01:16:48 +0000 (18:16 -0700)
Remove useless doc comment for slice

r? @steveklabnik

This doc comment isn't useful so I removed it. It also "fixes" the weird rustdoc output.

Before:

![screenshot from 2016-07-08 02-04-59](https://cloud.githubusercontent.com/assets/3050060/16688164/fbb321d6-451c-11e6-8d98-53346bc6eb1e.png)

After:

![screenshot from 2016-07-08 02-05-31](https://cloud.githubusercontent.com/assets/3050060/16688166/00b174e4-451d-11e6-83dd-93ab2509f647.png)

181 files changed:
README.md
RELEASES.md
mk/main.mk
src/bootstrap/Cargo.toml
src/bootstrap/bin/main.rs [new file with mode: 0644]
src/bootstrap/bin/rustc.rs [new file with mode: 0644]
src/bootstrap/bin/rustdoc.rs [new file with mode: 0644]
src/bootstrap/bootstrap.py
src/bootstrap/build/cc.rs [deleted file]
src/bootstrap/build/channel.rs [deleted file]
src/bootstrap/build/check.rs [deleted file]
src/bootstrap/build/clean.rs [deleted file]
src/bootstrap/build/compile.rs [deleted file]
src/bootstrap/build/config.rs [deleted file]
src/bootstrap/build/dist.rs [deleted file]
src/bootstrap/build/doc.rs [deleted file]
src/bootstrap/build/flags.rs [deleted file]
src/bootstrap/build/job.rs [deleted file]
src/bootstrap/build/mod.rs [deleted file]
src/bootstrap/build/native.rs [deleted file]
src/bootstrap/build/sanity.rs [deleted file]
src/bootstrap/build/step.rs [deleted file]
src/bootstrap/build/util.rs [deleted file]
src/bootstrap/cc.rs [new file with mode: 0644]
src/bootstrap/channel.rs [new file with mode: 0644]
src/bootstrap/check.rs [new file with mode: 0644]
src/bootstrap/clean.rs [new file with mode: 0644]
src/bootstrap/compile.rs [new file with mode: 0644]
src/bootstrap/config.rs [new file with mode: 0644]
src/bootstrap/dist.rs [new file with mode: 0644]
src/bootstrap/doc.rs [new file with mode: 0644]
src/bootstrap/flags.rs [new file with mode: 0644]
src/bootstrap/job.rs [new file with mode: 0644]
src/bootstrap/lib.rs
src/bootstrap/main.rs [deleted file]
src/bootstrap/native.rs [new file with mode: 0644]
src/bootstrap/rustc.rs [deleted file]
src/bootstrap/rustdoc.rs [deleted file]
src/bootstrap/sanity.rs [new file with mode: 0644]
src/bootstrap/step.rs [new file with mode: 0644]
src/bootstrap/util.rs [new file with mode: 0644]
src/doc/book/closures.md
src/doc/book/conditional-compilation.md
src/doc/book/documentation.md
src/doc/book/getting-started.md
src/doc/book/guessing-game.md
src/doc/book/inline-assembly.md
src/doc/book/loops.md
src/doc/book/mutability.md
src/doc/book/structs.md
src/doc/book/testing.md
src/etc/gdb_rust_pretty_printing.py
src/jemalloc
src/liballoc/rc.rs
src/libcore/iter/mod.rs
src/libcore/iter/traits.rs
src/libcore/num/f32.rs
src/libcore/num/f64.rs
src/libcore/num/int_macros.rs
src/libcore/num/mod.rs
src/libcore/num/uint_macros.rs
src/libcore/ops.rs
src/librustc/cfg/construct.rs
src/librustc/hir/def.rs
src/librustc/hir/fold.rs
src/librustc/hir/intravisit.rs
src/librustc/hir/lowering.rs
src/librustc/hir/mod.rs
src/librustc/hir/pat_util.rs
src/librustc/hir/print.rs
src/librustc/middle/expr_use_visitor.rs
src/librustc/middle/mem_categorization.rs
src/librustc/mir/repr.rs
src/librustc/session/config.rs
src/librustc/ty/context.rs
src/librustc/ty/mod.rs
src/librustc_const_eval/check_match.rs
src/librustc_const_eval/eval.rs
src/librustc_driver/driver.rs
src/librustc_lint/bad_style.rs
src/librustc_metadata/decoder.rs
src/librustc_metadata/encoder.rs
src/librustc_mir/hair/cx/pattern.rs
src/librustc_privacy/lib.rs
src/librustc_resolve/lib.rs
src/librustc_save_analysis/dump_visitor.rs
src/librustc_trans/_match.rs
src/librustc_trans/abi.rs
src/librustc_trans/back/link.rs
src/librustc_trans/back/lto.rs
src/librustc_trans/back/symbol_names.rs
src/librustc_trans/back/write.rs
src/librustc_trans/base.rs
src/librustc_trans/callee.rs
src/librustc_trans/closure.rs
src/librustc_trans/collector.rs
src/librustc_trans/consts.rs
src/librustc_trans/context.rs
src/librustc_trans/debuginfo/create_scope_map.rs
src/librustc_trans/debuginfo/metadata.rs
src/librustc_trans/declare.rs
src/librustc_trans/expr.rs
src/librustc_trans/glue.rs
src/librustc_trans/inline.rs
src/librustc_trans/lib.rs
src/librustc_trans/mir/operand.rs
src/librustc_trans/monomorphize.rs
src/librustc_trans/partitioning.rs
src/librustc_trans/symbol_map.rs [new file with mode: 0644]
src/librustc_trans/symbol_names_test.rs
src/librustc_trans/trans_item.rs
src/librustc_typeck/astconv.rs
src/librustc_typeck/check/_match.rs
src/librustc_typeck/check/mod.rs
src/librustc_typeck/collect.rs
src/librustc_typeck/diagnostics.rs
src/librustc_unicode/char.rs
src/librustdoc/clean/mod.rs
src/librustdoc/html/highlight.rs
src/librustdoc/html/render.rs
src/librustdoc/html/static/rustdoc.css
src/libstd/io/error.rs
src/libstd/io/util.rs
src/libstd/memchr.rs
src/libstd/path.rs
src/libstd/primitive_docs.rs
src/libstd/sys/common/net.rs
src/libsyntax/ext/expand.rs
src/libsyntax/ext/quote.rs
src/libsyntax/ext/tt/macro_rules.rs
src/libsyntax/ext/tt/transcribe.rs
src/libsyntax/fold.rs
src/libsyntax/parse/mod.rs
src/libsyntax/parse/parser.rs
src/libsyntax/tokenstream.rs
src/libsyntax_ext/asm.rs
src/llvm
src/rustllvm/llvm-auto-clean-trigger
src/test/codegen-units/item-collection/cross-crate-closures.rs
src/test/codegen-units/item-collection/non-generic-closures.rs
src/test/codegen-units/partitioning/extern-drop-glue.rs
src/test/codegen-units/partitioning/extern-generic.rs
src/test/codegen-units/partitioning/inlining-from-extern-crate.rs
src/test/codegen-units/partitioning/local-drop-glue.rs
src/test/codegen-units/partitioning/local-generic.rs
src/test/codegen-units/partitioning/local-inlining.rs
src/test/codegen-units/partitioning/local-transitive-inlining.rs
src/test/codegen-units/partitioning/methods-are-with-self-type.rs
src/test/codegen-units/partitioning/regular-modules.rs
src/test/codegen-units/partitioning/statics.rs
src/test/codegen/drop.rs
src/test/compile-fail-fulldeps/issue-18986.rs
src/test/compile-fail/E0163.rs [deleted file]
src/test/compile-fail/asm-bad-clobber.rs [new file with mode: 0644]
src/test/compile-fail/auxiliary/lint_stability.rs
src/test/compile-fail/empty-struct-braces-pat-1.rs
src/test/compile-fail/issue-16058.rs
src/test/compile-fail/issue-17001.rs
src/test/compile-fail/issue-17405.rs
src/test/compile-fail/issue-21449.rs
src/test/compile-fail/issue-22933-1.rs [new file with mode: 0644]
src/test/compile-fail/issue-22933-2.rs [new file with mode: 0644]
src/test/compile-fail/issue-26459.rs
src/test/compile-fail/issue-27815.rs
src/test/compile-fail/issue-27831.rs
src/test/compile-fail/issue-32004.rs
src/test/compile-fail/issue-34209.rs [new file with mode: 0644]
src/test/compile-fail/issue-4736.rs
src/test/compile-fail/lexical-scopes.rs
src/test/compile-fail/lint-stability.rs
src/test/compile-fail/method-path-in-pattern.rs
src/test/compile-fail/qualified-path-params.rs
src/test/compile-fail/struct-pat-associated-path.rs [new file with mode: 0644]
src/test/compile-fail/trait-as-struct-constructor.rs
src/test/run-pass-fulldeps/auxiliary/macro_crate_test.rs
src/test/run-pass-fulldeps/macro-crate.rs
src/test/run-pass/auxiliary/xcrate_generic_fn_nested_return.rs [new file with mode: 0644]
src/test/run-pass/issue-27021.rs [new file with mode: 0644]
src/test/run-pass/issue34569.rs [new file with mode: 0644]
src/test/run-pass/mir_trans_calls.rs
src/test/run-pass/xcrate_generic_fn_nested_return.rs [new file with mode: 0644]

index 49236d6b671ef74cb8f4bb8f90d35171d243dc88..283efdd2411564f962cdaf210c43eec37384a55a 100644 (file)
--- a/README.md
+++ b/README.md
@@ -66,7 +66,7 @@ build.
 
 [MSYS2][msys2] can be used to easily build Rust on Windows:
 
-msys2: https://msys2.github.io/
+[msys2]: https://msys2.github.io/
 
 1. Grab the latest [MSYS2 installer][msys2] and go through the installer.
 
@@ -105,7 +105,7 @@ msys2: https://msys2.github.io/
 
 MSVC builds of Rust additionally require an installation of Visual Studio 2013
 (or later) so `rustc` can use its linker. Make sure to check the “C++ tools”
-option. In addition, `cmake` needs to be installed to build LLVM.
+option.
 
 With these dependencies installed, the build takes two steps:
 
@@ -116,13 +116,25 @@ $ make && make install
 
 #### MSVC with rustbuild
 
-For those who don't want the hassle of MSYS or MinGW, you can invoke rustbuild
-directly. All you need are Python 2, CMake, and Git in your PATH (make sure you
-do __not__ use the ones from MSYS!). You'll also need Visual Studio 2013 or
-newer with the C++ tools. Then all you need to do is invoke the appropriate
-vcvars bat file and kick off rustbuild.
+The old build system, based on makefiles, is currently being rewritten into a
+Rust-based build system called rustbuild. This can be used to bootstrap the
+compiler on MSVC without needing to install MSYS or MinGW. All you need are
+[Python 2](https://www.python.org/downloads/),
+[CMake](https://cmake.org/download/), and
+[Git](https://git-scm.com/downloads) in your PATH (make sure you do not use the
+ones from MSYS if you have it installed). You'll also need Visual Studio 2013 or
+newer with the C++ tools. Then all you need to do is to kick off rustbuild.
 
-```bat
+```
+python .\src\bootstrap\bootstrap.py
+```
+
+Currently rustbuild only works with some known versions of Visual Studio. If you
+have a more recent version installed that a part of rustbuild doesn't understand
+then you may need to force rustbuild to use an older version. This can be done
+by manually calling the appropriate vcvars file before running the bootstrap.
+
+```
 CALL "C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\bin\amd64\vcvars64.bat"
 python .\src\bootstrap\bootstrap.py
 ```
index ffe8d64ff2da4c71b64338b88b460a5946f63b05..c798c56cd6d03f810d74976a66d2ab53b116c11c 100644 (file)
@@ -172,7 +172,7 @@ Libraries
   (https://github.com/rust-lang/rust/pull/33050).
 * [Implement `Display` and `Hash` for `std::num::Wrapping`]
   (https://github.com/rust-lang/rust/pull/33023).
-* [Add `Default` implementation for `&CStr`, `CString`, `Path`]
+* [Add `Default` implementation for `&CStr`, `CString`]
   (https://github.com/rust-lang/rust/pull/32990).
 * [Implement `From<Vec<T>>` and `Into<Vec<T>>` for `VecDeque<T>`]
   (https://github.com/rust-lang/rust/pull/32866).
index daf656f89c1a5a79bf621baf11b6bc972ae2a193..4c72597f0c5c1d2a2192a24f9d2337a36ae46be9 100644 (file)
@@ -13,7 +13,7 @@
 ######################################################################
 
 # The version number
-CFG_RELEASE_NUM=1.11.0
+CFG_RELEASE_NUM=1.12.0
 
 # An optional number to put after the label, e.g. '.2' -> '-beta.2'
 # NB Make sure it starts with a dot to conform to semver pre-release
index f9a64567ffde0e241a5f8c4d90aaae06e5f5b673..cde4a825be1fb81259d354b3cd266bfc47ab84d9 100644 (file)
@@ -9,15 +9,15 @@ path = "lib.rs"
 
 [[bin]]
 name = "bootstrap"
-path = "main.rs"
+path = "bin/main.rs"
 
 [[bin]]
 name = "rustc"
-path = "rustc.rs"
+path = "bin/rustc.rs"
 
 [[bin]]
 name = "rustdoc"
-path = "rustdoc.rs"
+path = "bin/rustdoc.rs"
 
 [dependencies]
 build_helper = { path = "../build_helper" }
diff --git a/src/bootstrap/bin/main.rs b/src/bootstrap/bin/main.rs
new file mode 100644 (file)
index 0000000..c47f4fd
--- /dev/null
@@ -0,0 +1,37 @@
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! rustbuild, the Rust build system
+//!
+//! This is the entry point for the build system used to compile the `rustc`
+//! compiler. Lots of documentation can be found in the `README.md` file next to
+//! this file, and otherwise documentation can be found throughout the `build`
+//! directory in each respective module.
+
+#![deny(warnings)]
+
+extern crate bootstrap;
+
+use std::env;
+
+use bootstrap::{Flags, Config, Build};
+
+fn main() {
+    let args = env::args().skip(1).collect::<Vec<_>>();
+    let flags = Flags::parse(&args);
+    let mut config = Config::parse(&flags.build, flags.config.clone());
+
+    // compat with `./configure` while we're still using that
+    if std::fs::metadata("config.mk").is_ok() {
+        config.update_with_config_mk();
+    }
+
+    Build::new(flags, config).build();
+}
diff --git a/src/bootstrap/bin/rustc.rs b/src/bootstrap/bin/rustc.rs
new file mode 100644 (file)
index 0000000..c64cbb9
--- /dev/null
@@ -0,0 +1,165 @@
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Shim which is passed to Cargo as "rustc" when running the bootstrap.
+//!
+//! This shim will take care of some various tasks that our build process
+//! requires that Cargo can't quite do through normal configuration:
+//!
+//! 1. When compiling build scripts and build dependencies, we need a guaranteed
+//!    full standard library available. The only compiler which actually has
+//!    this is the snapshot, so we detect this situation and always compile with
+//!    the snapshot compiler.
+//! 2. We pass a bunch of `--cfg` and other flags based on what we're compiling
+//!    (and this slightly differs based on a whether we're using a snapshot or
+//!    not), so we do that all here.
+//!
+//! This may one day be replaced by RUSTFLAGS, but the dynamic nature of
+//! switching compilers for the bootstrap and for build scripts will probably
+//! never get replaced.
+
+extern crate bootstrap;
+
+use std::env;
+use std::ffi::OsString;
+use std::path::PathBuf;
+use std::process::Command;
+
+fn main() {
+    let args = env::args_os().skip(1).collect::<Vec<_>>();
+    // Detect whether or not we're a build script depending on whether --target
+    // is passed (a bit janky...)
+    let target = args.windows(2).find(|w| &*w[0] == "--target")
+                                .and_then(|w| w[1].to_str());
+
+    // Build scripts always use the snapshot compiler which is guaranteed to be
+    // able to produce an executable, whereas intermediate compilers may not
+    // have the standard library built yet and may not be able to produce an
+    // executable. Otherwise we just use the standard compiler we're
+    // bootstrapping with.
+    let (rustc, libdir) = if target.is_none() {
+        ("RUSTC_SNAPSHOT", "RUSTC_SNAPSHOT_LIBDIR")
+    } else {
+        ("RUSTC_REAL", "RUSTC_LIBDIR")
+    };
+    let stage = env::var("RUSTC_STAGE").unwrap();
+
+    let rustc = env::var_os(rustc).unwrap();
+    let libdir = env::var_os(libdir).unwrap();
+    let mut dylib_path = bootstrap::util::dylib_path();
+    dylib_path.insert(0, PathBuf::from(libdir));
+
+    let mut cmd = Command::new(rustc);
+    cmd.args(&args)
+       .arg("--cfg").arg(format!("stage{}", stage))
+       .env(bootstrap::util::dylib_path_var(),
+            env::join_paths(&dylib_path).unwrap());
+
+    if let Some(target) = target {
+        // The stage0 compiler has a special sysroot distinct from what we
+        // actually downloaded, so we just always pass the `--sysroot` option.
+        cmd.arg("--sysroot").arg(env::var_os("RUSTC_SYSROOT").unwrap());
+
+        // When we build Rust dylibs they're all intended for intermediate
+        // usage, so make sure we pass the -Cprefer-dynamic flag instead of
+        // linking all deps statically into the dylib.
+        cmd.arg("-Cprefer-dynamic");
+
+        // Help the libc crate compile by assisting it in finding the MUSL
+        // native libraries.
+        if let Some(s) = env::var_os("MUSL_ROOT") {
+            let mut root = OsString::from("native=");
+            root.push(&s);
+            root.push("/lib");
+            cmd.arg("-L").arg(&root);
+        }
+
+        // Pass down extra flags, commonly used to configure `-Clinker` when
+        // cross compiling.
+        if let Ok(s) = env::var("RUSTC_FLAGS") {
+            cmd.args(&s.split(" ").filter(|s| !s.is_empty()).collect::<Vec<_>>());
+        }
+
+        // If we're compiling specifically the `panic_abort` crate then we pass
+        // the `-C panic=abort` option. Note that we do not do this for any
+        // other crate intentionally as this is the only crate for now that we
+        // ship with panic=abort.
+        //
+        // This... is a bit of a hack how we detect this. Ideally this
+        // information should be encoded in the crate I guess? Would likely
+        // require an RFC amendment to RFC 1513, however.
+        let is_panic_abort = args.windows(2).any(|a| {
+            &*a[0] == "--crate-name" && &*a[1] == "panic_abort"
+        });
+        // FIXME(stage0): remove this `stage != "0"` condition
+        if is_panic_abort && stage != "0" {
+            cmd.arg("-C").arg("panic=abort");
+        }
+
+        // Set various options from config.toml to configure how we're building
+        // code.
+        if env::var("RUSTC_DEBUGINFO") == Ok("true".to_string()) {
+            cmd.arg("-g");
+        }
+        let debug_assertions = match env::var("RUSTC_DEBUG_ASSERTIONS") {
+            Ok(s) => if s == "true" {"y"} else {"n"},
+            Err(..) => "n",
+        };
+        cmd.arg("-C").arg(format!("debug-assertions={}", debug_assertions));
+        if let Ok(s) = env::var("RUSTC_CODEGEN_UNITS") {
+            cmd.arg("-C").arg(format!("codegen-units={}", s));
+        }
+
+        // Dealing with rpath here is a little special, so let's go into some
+        // detail. First off, `-rpath` is a linker option on Unix platforms
+        // which adds to the runtime dynamic loader path when looking for
+        // dynamic libraries. We use this by default on Unix platforms to ensure
+        // that our nightlies behave the same on Windows, that is they work out
+        // of the box. This can be disabled, of course, but basically that's why
+        // we're gated on RUSTC_RPATH here.
+        //
+        // Ok, so the astute might be wondering "why isn't `-C rpath` used
+        // here?" and that is indeed a good question to task. This codegen
+        // option is the compiler's current interface to generating an rpath.
+        // Unfortunately it doesn't quite suffice for us. The flag currently
+        // takes no value as an argument, so the compiler calculates what it
+        // should pass to the linker as `-rpath`. This unfortunately is based on
+        // the **compile time** directory structure which when building with
+        // Cargo will be very different than the runtime directory structure.
+        //
+        // All that's a really long winded way of saying that if we use
+        // `-Crpath` then the executables generated have the wrong rpath of
+        // something like `$ORIGIN/deps` when in fact the way we distribute
+        // rustc requires the rpath to be `$ORIGIN/../lib`.
+        //
+        // So, all in all, to set up the correct rpath we pass the linker
+        // argument manually via `-C link-args=-Wl,-rpath,...`. Plus isn't it
+        // fun to pass a flag to a tool to pass a flag to pass a flag to a tool
+        // to change a flag in a binary?
+        if env::var("RUSTC_RPATH") == Ok("true".to_string()) {
+            let rpath = if target.contains("apple") {
+                Some("-Wl,-rpath,@loader_path/../lib")
+            } else if !target.contains("windows") {
+                Some("-Wl,-rpath,$ORIGIN/../lib")
+            } else {
+                None
+            };
+            if let Some(rpath) = rpath {
+                cmd.arg("-C").arg(format!("link-args={}", rpath));
+            }
+        }
+    }
+
+    // Actually run the compiler!
+    std::process::exit(match cmd.status() {
+        Ok(s) => s.code().unwrap_or(1),
+        Err(e) => panic!("\n\nfailed to run {:?}: {}\n\n", cmd, e),
+    })
+}
diff --git a/src/bootstrap/bin/rustdoc.rs b/src/bootstrap/bin/rustdoc.rs
new file mode 100644 (file)
index 0000000..79629bf
--- /dev/null
@@ -0,0 +1,40 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Shim which is passed to Cargo as "rustdoc" when running the bootstrap.
+//!
+//! See comments in `src/bootstrap/rustc.rs` for more information.
+
+extern crate bootstrap;
+
+use std::env;
+use std::process::Command;
+use std::path::PathBuf;
+
+fn main() {
+    let args = env::args_os().skip(1).collect::<Vec<_>>();
+    let rustdoc = env::var_os("RUSTDOC_REAL").unwrap();
+    let libdir = env::var_os("RUSTC_LIBDIR").unwrap();
+
+    let mut dylib_path = bootstrap::util::dylib_path();
+    dylib_path.insert(0, PathBuf::from(libdir));
+
+    let mut cmd = Command::new(rustdoc);
+    cmd.args(&args)
+       .arg("--cfg").arg(format!("stage{}", env::var("RUSTC_STAGE").unwrap()))
+       .arg("--cfg").arg("dox")
+       .env(bootstrap::util::dylib_path_var(),
+            env::join_paths(&dylib_path).unwrap());
+    std::process::exit(match cmd.status() {
+        Ok(s) => s.code().unwrap_or(1),
+        Err(e) => panic!("\n\nfailed to run {:?}: {}\n\n", cmd, e),
+    })
+}
+
index 832911beb588c762d6ac329e04beda0eb2217d7e..17a7c9ca66a2664488d4187d4af588b51a847a36 100644 (file)
@@ -359,7 +359,7 @@ def main():
     parser.add_argument('--clean', action='store_true')
     parser.add_argument('-v', '--verbose', action='store_true')
 
-    args = [a for a in sys.argv if a != '-h']
+    args = [a for a in sys.argv if a != '-h' and a != '--help']
     args, _ = parser.parse_known_args(args)
 
     # Configure initial bootstrap
diff --git a/src/bootstrap/build/cc.rs b/src/bootstrap/build/cc.rs
deleted file mode 100644 (file)
index ff0941a..0000000
+++ /dev/null
@@ -1,124 +0,0 @@
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! C-compiler probing and detection.
-//!
-//! This module will fill out the `cc` and `cxx` maps of `Build` by looking for
-//! C and C++ compilers for each target configured. A compiler is found through
-//! a number of vectors (in order of precedence)
-//!
-//! 1. Configuration via `target.$target.cc` in `config.toml`.
-//! 2. Configuration via `target.$target.android-ndk` in `config.toml`, if
-//!    applicable
-//! 3. Special logic to probe on OpenBSD
-//! 4. The `CC_$target` environment variable.
-//! 5. The `CC` environment variable.
-//! 6. "cc"
-//!
-//! Some of this logic is implemented here, but much of it is farmed out to the
-//! `gcc` crate itself, so we end up having the same fallbacks as there.
-//! Similar logic is then used to find a C++ compiler, just some s/cc/c++/ is
-//! used.
-//!
-//! It is intended that after this module has run no C/C++ compiler will
-//! ever be probed for. Instead the compilers found here will be used for
-//! everything.
-
-use std::process::Command;
-
-use build_helper::{cc2ar, output};
-use gcc;
-
-use build::Build;
-use build::config::Target;
-
-pub fn find(build: &mut Build) {
-    // For all targets we're going to need a C compiler for building some shims
-    // and such as well as for being a linker for Rust code.
-    for target in build.config.target.iter() {
-        let mut cfg = gcc::Config::new();
-        cfg.cargo_metadata(false).opt_level(0).debug(false)
-           .target(target).host(&build.config.build);
-
-        let config = build.config.target_config.get(target);
-        if let Some(cc) = config.and_then(|c| c.cc.as_ref()) {
-            cfg.compiler(cc);
-        } else {
-            set_compiler(&mut cfg, "gcc", target, config);
-        }
-
-        let compiler = cfg.get_compiler();
-        let ar = cc2ar(compiler.path(), target);
-        build.verbose(&format!("CC_{} = {:?}", target, compiler.path()));
-        if let Some(ref ar) = ar {
-            build.verbose(&format!("AR_{} = {:?}", target, ar));
-        }
-        build.cc.insert(target.to_string(), (compiler, ar));
-    }
-
-    // For all host triples we need to find a C++ compiler as well
-    for host in build.config.host.iter() {
-        let mut cfg = gcc::Config::new();
-        cfg.cargo_metadata(false).opt_level(0).debug(false).cpp(true)
-           .target(host).host(&build.config.build);
-        let config = build.config.target_config.get(host);
-        if let Some(cxx) = config.and_then(|c| c.cxx.as_ref()) {
-            cfg.compiler(cxx);
-        } else {
-            set_compiler(&mut cfg, "g++", host, config);
-        }
-        let compiler = cfg.get_compiler();
-        build.verbose(&format!("CXX_{} = {:?}", host, compiler.path()));
-        build.cxx.insert(host.to_string(), compiler);
-    }
-}
-
-fn set_compiler(cfg: &mut gcc::Config,
-                gnu_compiler: &str,
-                target: &str,
-                config: Option<&Target>) {
-    match target {
-        // When compiling for android we may have the NDK configured in the
-        // config.toml in which case we look there. Otherwise the default
-        // compiler already takes into account the triple in question.
-        t if t.contains("android") => {
-            if let Some(ndk) = config.and_then(|c| c.ndk.as_ref()) {
-                let target = target.replace("armv7", "arm");
-                let compiler = format!("{}-{}", target, gnu_compiler);
-                cfg.compiler(ndk.join("bin").join(compiler));
-            }
-        }
-
-        // The default gcc version from OpenBSD may be too old, try using egcc,
-        // which is a gcc version from ports, if this is the case.
-        t if t.contains("openbsd") => {
-            let c = cfg.get_compiler();
-            if !c.path().ends_with(gnu_compiler) {
-                return
-            }
-
-            let output = output(c.to_command().arg("--version"));
-            let i = match output.find(" 4.") {
-                Some(i) => i,
-                None => return,
-            };
-            match output[i + 3..].chars().next().unwrap() {
-                '0' ... '6' => {}
-                _ => return,
-            }
-            let alternative = format!("e{}", gnu_compiler);
-            if Command::new(&alternative).output().is_ok() {
-                cfg.compiler(alternative);
-            }
-        }
-
-        _ => {}
-    }
-}
diff --git a/src/bootstrap/build/channel.rs b/src/bootstrap/build/channel.rs
deleted file mode 100644 (file)
index 76d061e..0000000
+++ /dev/null
@@ -1,110 +0,0 @@
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Build configuration for Rust's release channels.
-//!
-//! Implements the stable/beta/nightly channel distinctions by setting various
-//! flags like the `unstable_features`, calculating variables like `release` and
-//! `package_vers`, and otherwise indicating to the compiler what it should
-//! print out as part of its version information.
-
-use std::fs::{self, File};
-use std::io::prelude::*;
-use std::process::Command;
-
-use build_helper::output;
-use md5;
-
-use build::Build;
-
-pub fn collect(build: &mut Build) {
-    // Currently the canonical source for the release number (e.g. 1.10.0) and
-    // the prerelease version (e.g. `.1`) is in `mk/main.mk`. We "parse" that
-    // here to learn about those numbers.
-    let mut main_mk = String::new();
-    t!(t!(File::open(build.src.join("mk/main.mk"))).read_to_string(&mut main_mk));
-    let mut release_num = "";
-    let mut prerelease_version = "";
-    for line in main_mk.lines() {
-        if line.starts_with("CFG_RELEASE_NUM") {
-            release_num = line.split('=').skip(1).next().unwrap().trim();
-        }
-        if line.starts_with("CFG_PRERELEASE_VERSION") {
-            prerelease_version = line.split('=').skip(1).next().unwrap().trim();
-        }
-    }
-
-    // Depending on the channel, passed in `./configure --release-channel`,
-    // determine various properties of the build.
-    match &build.config.channel[..] {
-        "stable" => {
-            build.release = release_num.to_string();
-            build.package_vers = build.release.clone();
-            build.unstable_features = false;
-        }
-        "beta" => {
-            build.release = format!("{}-beta{}", release_num,
-                                   prerelease_version);
-            build.package_vers = "beta".to_string();
-            build.unstable_features = false;
-        }
-        "nightly" => {
-            build.release = format!("{}-nightly", release_num);
-            build.package_vers = "nightly".to_string();
-            build.unstable_features = true;
-        }
-        _ => {
-            build.release = format!("{}-dev", release_num);
-            build.package_vers = build.release.clone();
-            build.unstable_features = true;
-        }
-    }
-    build.version = build.release.clone();
-
-    // If we have a git directory, add in some various SHA information of what
-    // commit this compiler was compiled from.
-    if fs::metadata(build.src.join(".git")).is_ok() {
-        let ver_date = output(Command::new("git").current_dir(&build.src)
-                                      .arg("log").arg("-1")
-                                      .arg("--date=short")
-                                      .arg("--pretty=format:%cd"));
-        let ver_hash = output(Command::new("git").current_dir(&build.src)
-                                      .arg("rev-parse").arg("HEAD"));
-        let short_ver_hash = output(Command::new("git")
-                                            .current_dir(&build.src)
-                                            .arg("rev-parse")
-                                            .arg("--short=9")
-                                            .arg("HEAD"));
-        let ver_date = ver_date.trim().to_string();
-        let ver_hash = ver_hash.trim().to_string();
-        let short_ver_hash = short_ver_hash.trim().to_string();
-        build.version.push_str(&format!(" ({} {})", short_ver_hash,
-                                       ver_date));
-        build.ver_date = Some(ver_date.to_string());
-        build.ver_hash = Some(ver_hash);
-        build.short_ver_hash = Some(short_ver_hash);
-    }
-
-    // Calculate this compiler's bootstrap key, which is currently defined as
-    // the first 8 characters of the md5 of the release string.
-    let key = md5::compute(build.release.as_bytes());
-    build.bootstrap_key = format!("{:02x}{:02x}{:02x}{:02x}",
-                                  key[0], key[1], key[2], key[3]);
-
-    // Slurp up the stage0 bootstrap key as we're bootstrapping from an
-    // otherwise stable compiler.
-    let mut s = String::new();
-    t!(t!(File::open(build.src.join("src/stage0.txt"))).read_to_string(&mut s));
-    if let Some(line) = s.lines().find(|l| l.starts_with("rustc_key")) {
-        if let Some(key) = line.split(": ").nth(1) {
-            build.bootstrap_key_stage0 = key.to_string();
-        }
-    }
-}
diff --git a/src/bootstrap/build/check.rs b/src/bootstrap/build/check.rs
deleted file mode 100644 (file)
index 0a096f8..0000000
+++ /dev/null
@@ -1,414 +0,0 @@
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Implementation of the various `check-*` targets of the build system.
-//!
-//! This file implements the various regression test suites that we execute on
-//! our CI.
-
-use std::env;
-use std::fs::{self, File};
-use std::io::prelude::*;
-use std::path::{PathBuf, Path};
-use std::process::Command;
-
-use build_helper::output;
-use bootstrap::{dylib_path, dylib_path_var};
-
-use build::{Build, Compiler, Mode};
-use build::util;
-
-const ADB_TEST_DIR: &'static str = "/data/tmp";
-
-/// Runs the `linkchecker` tool as compiled in `stage` by the `host` compiler.
-///
-/// This tool in `src/tools` will verify the validity of all our links in the
-/// documentation to ensure we don't have a bunch of dead ones.
-pub fn linkcheck(build: &Build, stage: u32, host: &str) {
-    println!("Linkcheck stage{} ({})", stage, host);
-    let compiler = Compiler::new(stage, host);
-    build.run(build.tool_cmd(&compiler, "linkchecker")
-                   .arg(build.out.join(host).join("doc")));
-}
-
-/// Runs the `cargotest` tool as compiled in `stage` by the `host` compiler.
-///
-/// This tool in `src/tools` will check out a few Rust projects and run `cargo
-/// test` to ensure that we don't regress the test suites there.
-pub fn cargotest(build: &Build, stage: u32, host: &str) {
-    let ref compiler = Compiler::new(stage, host);
-
-    // Configure PATH to find the right rustc. NB. we have to use PATH
-    // and not RUSTC because the Cargo test suite has tests that will
-    // fail if rustc is not spelled `rustc`.
-    let path = build.sysroot(compiler).join("bin");
-    let old_path = ::std::env::var("PATH").expect("");
-    let sep = if cfg!(windows) { ";" } else {":" };
-    let ref newpath = format!("{}{}{}", path.display(), sep, old_path);
-
-    // Note that this is a short, cryptic, and not scoped directory name. This
-    // is currently to minimize the length of path on Windows where we otherwise
-    // quickly run into path name limit constraints.
-    let out_dir = build.out.join("ct");
-    t!(fs::create_dir_all(&out_dir));
-
-    build.run(build.tool_cmd(compiler, "cargotest")
-                   .env("PATH", newpath)
-                   .arg(&build.cargo)
-                   .arg(&out_dir));
-}
-
-/// Runs the `tidy` tool as compiled in `stage` by the `host` compiler.
-///
-/// This tool in `src/tools` checks up on various bits and pieces of style and
-/// otherwise just implements a few lint-like checks that are specific to the
-/// compiler itself.
-pub fn tidy(build: &Build, stage: u32, host: &str) {
-    println!("tidy check stage{} ({})", stage, host);
-    let compiler = Compiler::new(stage, host);
-    build.run(build.tool_cmd(&compiler, "tidy")
-                   .arg(build.src.join("src")));
-}
-
-fn testdir(build: &Build, host: &str) -> PathBuf {
-    build.out.join(host).join("test")
-}
-
-/// Executes the `compiletest` tool to run a suite of tests.
-///
-/// Compiles all tests with `compiler` for `target` with the specified
-/// compiletest `mode` and `suite` arguments. For example `mode` can be
-/// "run-pass" or `suite` can be something like `debuginfo`.
-pub fn compiletest(build: &Build,
-                   compiler: &Compiler,
-                   target: &str,
-                   mode: &str,
-                   suite: &str) {
-    println!("Check compiletest {} ({} -> {})", suite, compiler.host, target);
-    let mut cmd = build.tool_cmd(compiler, "compiletest");
-
-    // compiletest currently has... a lot of arguments, so let's just pass all
-    // of them!
-
-    cmd.arg("--compile-lib-path").arg(build.rustc_libdir(compiler));
-    cmd.arg("--run-lib-path").arg(build.sysroot_libdir(compiler, target));
-    cmd.arg("--rustc-path").arg(build.compiler_path(compiler));
-    cmd.arg("--rustdoc-path").arg(build.rustdoc(compiler));
-    cmd.arg("--src-base").arg(build.src.join("src/test").join(suite));
-    cmd.arg("--build-base").arg(testdir(build, compiler.host).join(suite));
-    cmd.arg("--stage-id").arg(format!("stage{}-{}", compiler.stage, target));
-    cmd.arg("--mode").arg(mode);
-    cmd.arg("--target").arg(target);
-    cmd.arg("--host").arg(compiler.host);
-    cmd.arg("--llvm-filecheck").arg(build.llvm_filecheck(&build.config.build));
-
-    let mut flags = vec!["-Crpath".to_string()];
-    if build.config.rust_optimize_tests {
-        flags.push("-O".to_string());
-    }
-    if build.config.rust_debuginfo_tests {
-        flags.push("-g".to_string());
-    }
-
-    let mut hostflags = build.rustc_flags(&compiler.host);
-    hostflags.extend(flags.clone());
-    cmd.arg("--host-rustcflags").arg(hostflags.join(" "));
-
-    let mut targetflags = build.rustc_flags(&target);
-    targetflags.extend(flags);
-    targetflags.push(format!("-Lnative={}",
-                             build.test_helpers_out(target).display()));
-    cmd.arg("--target-rustcflags").arg(targetflags.join(" "));
-
-    // FIXME: CFG_PYTHON should probably be detected more robustly elsewhere
-    let python_default = "python";
-    cmd.arg("--docck-python").arg(python_default);
-
-    if build.config.build.ends_with("apple-darwin") {
-        // Force /usr/bin/python on OSX for LLDB tests because we're loading the
-        // LLDB plugin's compiled module which only works with the system python
-        // (namely not Homebrew-installed python)
-        cmd.arg("--lldb-python").arg("/usr/bin/python");
-    } else {
-        cmd.arg("--lldb-python").arg(python_default);
-    }
-
-    if let Some(ref vers) = build.gdb_version {
-        cmd.arg("--gdb-version").arg(vers);
-    }
-    if let Some(ref vers) = build.lldb_version {
-        cmd.arg("--lldb-version").arg(vers);
-    }
-    if let Some(ref dir) = build.lldb_python_dir {
-        cmd.arg("--lldb-python-dir").arg(dir);
-    }
-
-    cmd.args(&build.flags.args);
-
-    if build.config.verbose || build.flags.verbose {
-        cmd.arg("--verbose");
-    }
-
-    // Only pass correct values for these flags for the `run-make` suite as it
-    // requires that a C++ compiler was configured which isn't always the case.
-    if suite == "run-make" {
-        let llvm_config = build.llvm_config(target);
-        let llvm_components = output(Command::new(&llvm_config).arg("--components"));
-        let llvm_cxxflags = output(Command::new(&llvm_config).arg("--cxxflags"));
-        cmd.arg("--cc").arg(build.cc(target))
-           .arg("--cxx").arg(build.cxx(target))
-           .arg("--cflags").arg(build.cflags(target).join(" "))
-           .arg("--llvm-components").arg(llvm_components.trim())
-           .arg("--llvm-cxxflags").arg(llvm_cxxflags.trim());
-    } else {
-        cmd.arg("--cc").arg("")
-           .arg("--cxx").arg("")
-           .arg("--cflags").arg("")
-           .arg("--llvm-components").arg("")
-           .arg("--llvm-cxxflags").arg("");
-    }
-
-    // Running a C compiler on MSVC requires a few env vars to be set, to be
-    // sure to set them here.
-    if target.contains("msvc") {
-        for &(ref k, ref v) in build.cc[target].0.env() {
-            if k != "PATH" {
-                cmd.env(k, v);
-            }
-        }
-    }
-    build.add_bootstrap_key(compiler, &mut cmd);
-
-    cmd.arg("--adb-path").arg("adb");
-    cmd.arg("--adb-test-dir").arg(ADB_TEST_DIR);
-    if target.contains("android") {
-        // Assume that cc for this target comes from the android sysroot
-        cmd.arg("--android-cross-path")
-           .arg(build.cc(target).parent().unwrap().parent().unwrap());
-    } else {
-        cmd.arg("--android-cross-path").arg("");
-    }
-
-    build.run(&mut cmd);
-}
-
-/// Run `rustdoc --test` for all documentation in `src/doc`.
-///
-/// This will run all tests in our markdown documentation (e.g. the book)
-/// located in `src/doc`. The `rustdoc` that's run is the one that sits next to
-/// `compiler`.
-pub fn docs(build: &Build, compiler: &Compiler) {
-    // Do a breadth-first traversal of the `src/doc` directory and just run
-    // tests for all files that end in `*.md`
-    let mut stack = vec![build.src.join("src/doc")];
-
-    while let Some(p) = stack.pop() {
-        if p.is_dir() {
-            stack.extend(t!(p.read_dir()).map(|p| t!(p).path()));
-            continue
-        }
-
-        if p.extension().and_then(|s| s.to_str()) != Some("md") {
-            continue
-        }
-
-        println!("doc tests for: {}", p.display());
-        markdown_test(build, compiler, &p);
-    }
-}
-
-/// Run the error index generator tool to execute the tests located in the error
-/// index.
-///
-/// The `error_index_generator` tool lives in `src/tools` and is used to
-/// generate a markdown file from the error indexes of the code base which is
-/// then passed to `rustdoc --test`.
-pub fn error_index(build: &Build, compiler: &Compiler) {
-    println!("Testing error-index stage{}", compiler.stage);
-
-    let output = testdir(build, compiler.host).join("error-index.md");
-    build.run(build.tool_cmd(compiler, "error_index_generator")
-                   .arg("markdown")
-                   .arg(&output)
-                   .env("CFG_BUILD", &build.config.build));
-
-    markdown_test(build, compiler, &output);
-}
-
-fn markdown_test(build: &Build, compiler: &Compiler, markdown: &Path) {
-    let mut cmd = Command::new(build.rustdoc(compiler));
-    build.add_rustc_lib_path(compiler, &mut cmd);
-    cmd.arg("--test");
-    cmd.arg(markdown);
-    cmd.arg("--test-args").arg(build.flags.args.join(" "));
-    build.run(&mut cmd);
-}
-
-/// Run all unit tests plus documentation tests for an entire crate DAG defined
-/// by a `Cargo.toml`
-///
-/// This is what runs tests for crates like the standard library, compiler, etc.
-/// It essentially is the driver for running `cargo test`.
-///
-/// Currently this runs all tests for a DAG by passing a bunch of `-p foo`
-/// arguments, and those arguments are discovered from `Cargo.lock`.
-pub fn krate(build: &Build,
-             compiler: &Compiler,
-             target: &str,
-             mode: Mode) {
-    let (name, path, features) = match mode {
-        Mode::Libstd => ("libstd", "src/rustc/std_shim", build.std_features()),
-        Mode::Libtest => ("libtest", "src/rustc/test_shim", String::new()),
-        Mode::Librustc => ("librustc", "src/rustc", build.rustc_features()),
-        _ => panic!("can only test libraries"),
-    };
-    println!("Testing {} stage{} ({} -> {})", name, compiler.stage,
-             compiler.host, target);
-
-    // Build up the base `cargo test` command.
-    let mut cargo = build.cargo(compiler, mode, target, "test");
-    cargo.arg("--manifest-path")
-         .arg(build.src.join(path).join("Cargo.toml"))
-         .arg("--features").arg(features);
-
-    // Generate a list of `-p` arguments to pass to the `cargo test` invocation
-    // by crawling the corresponding Cargo.lock file.
-    let lockfile = build.src.join(path).join("Cargo.lock");
-    let mut contents = String::new();
-    t!(t!(File::open(&lockfile)).read_to_string(&mut contents));
-    let mut lines = contents.lines();
-    while let Some(line) = lines.next() {
-        let prefix = "name = \"";
-        if !line.starts_with(prefix) {
-            continue
-        }
-        lines.next(); // skip `version = ...`
-
-        // skip crates.io or otherwise non-path crates
-        if let Some(line) = lines.next() {
-            if line.starts_with("source") {
-                continue
-            }
-        }
-
-        let crate_name = &line[prefix.len()..line.len() - 1];
-
-        // Right now jemalloc is our only target-specific crate in the sense
-        // that it's not present on all platforms. Custom skip it here for now,
-        // but if we add more this probably wants to get more generalized.
-        if crate_name.contains("jemalloc") {
-            continue
-        }
-
-        cargo.arg("-p").arg(crate_name);
-    }
-
-    // The tests are going to run with the *target* libraries, so we need to
-    // ensure that those libraries show up in the LD_LIBRARY_PATH equivalent.
-    //
-    // Note that to run the compiler we need to run with the *host* libraries,
-    // but our wrapper scripts arrange for that to be the case anyway.
-    let mut dylib_path = dylib_path();
-    dylib_path.insert(0, build.sysroot_libdir(compiler, target));
-    cargo.env(dylib_path_var(), env::join_paths(&dylib_path).unwrap());
-
-    if target.contains("android") {
-        build.run(cargo.arg("--no-run"));
-        krate_android(build, compiler, target, mode);
-    } else {
-        cargo.args(&build.flags.args);
-        build.run(&mut cargo);
-    }
-}
-
-fn krate_android(build: &Build,
-                 compiler: &Compiler,
-                 target: &str,
-                 mode: Mode) {
-    let mut tests = Vec::new();
-    let out_dir = build.cargo_out(compiler, mode, target);
-    find_tests(&out_dir, target, &mut tests);
-    find_tests(&out_dir.join("deps"), target, &mut tests);
-
-    for test in tests {
-        build.run(Command::new("adb").arg("push").arg(&test).arg(ADB_TEST_DIR));
-
-        let test_file_name = test.file_name().unwrap().to_string_lossy();
-        let log = format!("{}/check-stage{}-T-{}-H-{}-{}.log",
-                          ADB_TEST_DIR,
-                          compiler.stage,
-                          target,
-                          compiler.host,
-                          test_file_name);
-        let program = format!("(cd {dir}; \
-                                LD_LIBRARY_PATH=./{target} ./{test} \
-                                    --logfile {log} \
-                                    {args})",
-                              dir = ADB_TEST_DIR,
-                              target = target,
-                              test = test_file_name,
-                              log = log,
-                              args = build.flags.args.join(" "));
-
-        let output = output(Command::new("adb").arg("shell").arg(&program));
-        println!("{}", output);
-        build.run(Command::new("adb")
-                          .arg("pull")
-                          .arg(&log)
-                          .arg(build.out.join("tmp")));
-        build.run(Command::new("adb").arg("shell").arg("rm").arg(&log));
-        if !output.contains("result: ok") {
-            panic!("some tests failed");
-        }
-    }
-}
-
-fn find_tests(dir: &Path,
-              target: &str,
-              dst: &mut Vec<PathBuf>) {
-    for e in t!(dir.read_dir()).map(|e| t!(e)) {
-        let file_type = t!(e.file_type());
-        if !file_type.is_file() {
-            continue
-        }
-        let filename = e.file_name().into_string().unwrap();
-        if (target.contains("windows") && filename.ends_with(".exe")) ||
-           (!target.contains("windows") && !filename.contains(".")) {
-            dst.push(e.path());
-        }
-    }
-}
-
-pub fn android_copy_libs(build: &Build,
-                         compiler: &Compiler,
-                         target: &str) {
-    println!("Android copy libs to emulator ({})", target);
-    build.run(Command::new("adb").arg("remount"));
-    build.run(Command::new("adb").args(&["shell", "rm", "-r", ADB_TEST_DIR]));
-    build.run(Command::new("adb").args(&["shell", "mkdir", ADB_TEST_DIR]));
-    build.run(Command::new("adb")
-                      .arg("push")
-                      .arg(build.src.join("src/etc/adb_run_wrapper.sh"))
-                      .arg(ADB_TEST_DIR));
-
-    let target_dir = format!("{}/{}", ADB_TEST_DIR, target);
-    build.run(Command::new("adb").args(&["shell", "mkdir", &target_dir[..]]));
-
-    for f in t!(build.sysroot_libdir(compiler, target).read_dir()) {
-        let f = t!(f);
-        let name = f.file_name().into_string().unwrap();
-        if util::is_dylib(&name) {
-            build.run(Command::new("adb")
-                              .arg("push")
-                              .arg(f.path())
-                              .arg(&target_dir));
-        }
-    }
-}
diff --git a/src/bootstrap/build/clean.rs b/src/bootstrap/build/clean.rs
deleted file mode 100644 (file)
index 91334bd..0000000
+++ /dev/null
@@ -1,49 +0,0 @@
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Implementation of `make clean` in rustbuild.
-//!
-//! Responsible for cleaning out a build directory of all old and stale
-//! artifacts to prepare for a fresh build. Currently doesn't remove the
-//! `build/cache` directory (download cache) or the `build/$target/llvm`
-//! directory as we want that cached between builds.
-
-use std::fs;
-use std::path::Path;
-
-use build::Build;
-
-pub fn clean(build: &Build) {
-    rm_rf(build, "tmp".as_ref());
-    rm_rf(build, &build.out.join("tmp"));
-
-    for host in build.config.host.iter() {
-
-        let out = build.out.join(host);
-
-        rm_rf(build, &out.join("compiler-rt"));
-        rm_rf(build, &out.join("doc"));
-
-        for stage in 0..4 {
-            rm_rf(build, &out.join(format!("stage{}", stage)));
-            rm_rf(build, &out.join(format!("stage{}-std", stage)));
-            rm_rf(build, &out.join(format!("stage{}-rustc", stage)));
-            rm_rf(build, &out.join(format!("stage{}-tools", stage)));
-            rm_rf(build, &out.join(format!("stage{}-test", stage)));
-        }
-    }
-}
-
-fn rm_rf(build: &Build, path: &Path) {
-    if path.exists() {
-        build.verbose(&format!("removing `{}`", path.display()));
-        t!(fs::remove_dir_all(path));
-    }
-}
diff --git a/src/bootstrap/build/compile.rs b/src/bootstrap/build/compile.rs
deleted file mode 100644 (file)
index 5ed9c1c..0000000
+++ /dev/null
@@ -1,360 +0,0 @@
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Implementation of compiling various phases of the compiler and standard
-//! library.
-//!
-//! This module contains some of the real meat in the rustbuild build system
-//! which is where Cargo is used to compiler the standard library, libtest, and
-//! compiler. This module is also responsible for assembling the sysroot as it
-//! goes along from the output of the previous stage.
-
-use std::collections::HashMap;
-use std::fs;
-use std::path::{Path, PathBuf};
-use std::process::Command;
-
-use build_helper::output;
-
-use build::util::{exe, staticlib, libdir, mtime, is_dylib, copy};
-use build::{Build, Compiler, Mode};
-
-/// Build the standard library.
-///
-/// This will build the standard library for a particular stage of the build
-/// using the `compiler` targeting the `target` architecture. The artifacts
-/// created will also be linked into the sysroot directory.
-pub fn std<'a>(build: &'a Build, target: &str, compiler: &Compiler<'a>) {
-    println!("Building stage{} std artifacts ({} -> {})", compiler.stage,
-             compiler.host, target);
-
-    // Move compiler-rt into place as it'll be required by the compiler when
-    // building the standard library to link the dylib of libstd
-    let libdir = build.sysroot_libdir(compiler, target);
-    let _ = fs::remove_dir_all(&libdir);
-    t!(fs::create_dir_all(&libdir));
-    copy(&build.compiler_rt_built.borrow()[target],
-         &libdir.join(staticlib("compiler-rt", target)));
-
-    // Some platforms have startup objects that may be required to produce the
-    // libstd dynamic library, for example.
-    build_startup_objects(build, target, &libdir);
-
-    let out_dir = build.cargo_out(compiler, Mode::Libstd, target);
-    build.clear_if_dirty(&out_dir, &build.compiler_path(compiler));
-    let mut cargo = build.cargo(compiler, Mode::Libstd, target, "build");
-    cargo.arg("--features").arg(build.std_features())
-         .arg("--manifest-path")
-         .arg(build.src.join("src/rustc/std_shim/Cargo.toml"));
-
-    if let Some(target) = build.config.target_config.get(target) {
-        if let Some(ref jemalloc) = target.jemalloc {
-            cargo.env("JEMALLOC_OVERRIDE", jemalloc);
-        }
-    }
-    if let Some(ref p) = build.config.musl_root {
-        if target.contains("musl") {
-            cargo.env("MUSL_ROOT", p);
-        }
-    }
-
-    build.run(&mut cargo);
-    std_link(build, target, compiler, compiler.host);
-}
-
-/// Link all libstd rlibs/dylibs into the sysroot location.
-///
-/// Links those artifacts generated in the given `stage` for `target` produced
-/// by `compiler` into `host`'s sysroot.
-pub fn std_link(build: &Build,
-                target: &str,
-                compiler: &Compiler,
-                host: &str) {
-    let target_compiler = Compiler::new(compiler.stage, host);
-    let libdir = build.sysroot_libdir(&target_compiler, target);
-    let out_dir = build.cargo_out(compiler, Mode::Libstd, target);
-
-    // If we're linking one compiler host's output into another, then we weren't
-    // called from the `std` method above. In that case we clean out what's
-    // already there and then also link compiler-rt into place.
-    if host != compiler.host {
-        let _ = fs::remove_dir_all(&libdir);
-        t!(fs::create_dir_all(&libdir));
-        copy(&build.compiler_rt_built.borrow()[target],
-             &libdir.join(staticlib("compiler-rt", target)));
-    }
-    add_to_sysroot(&out_dir, &libdir);
-
-    if target.contains("musl") &&
-       (target.contains("x86_64") || target.contains("i686")) {
-        copy_third_party_objects(build, target, &libdir);
-    }
-}
-
-/// Copies the crt(1,i,n).o startup objects
-///
-/// Only required for musl targets that statically link to libc
-fn copy_third_party_objects(build: &Build, target: &str, into: &Path) {
-    for &obj in &["crt1.o", "crti.o", "crtn.o"] {
-        copy(&compiler_file(build.cc(target), obj), &into.join(obj));
-    }
-}
-
-/// Build and prepare startup objects like rsbegin.o and rsend.o
-///
-/// These are primarily used on Windows right now for linking executables/dlls.
-/// They don't require any library support as they're just plain old object
-/// files, so we just use the nightly snapshot compiler to always build them (as
-/// no other compilers are guaranteed to be available).
-fn build_startup_objects(build: &Build, target: &str, into: &Path) {
-    if !target.contains("pc-windows-gnu") {
-        return
-    }
-    let compiler = Compiler::new(0, &build.config.build);
-    let compiler = build.compiler_path(&compiler);
-
-    for file in t!(fs::read_dir(build.src.join("src/rtstartup"))) {
-        let file = t!(file);
-        build.run(Command::new(&compiler)
-                          .arg("--emit=obj")
-                          .arg("--out-dir").arg(into)
-                          .arg(file.path()));
-    }
-
-    for obj in ["crt2.o", "dllcrt2.o"].iter() {
-        copy(&compiler_file(build.cc(target), obj), &into.join(obj));
-    }
-}
-
-/// Build libtest.
-///
-/// This will build libtest and supporting libraries for a particular stage of
-/// the build using the `compiler` targeting the `target` architecture. The
-/// artifacts created will also be linked into the sysroot directory.
-pub fn test<'a>(build: &'a Build, target: &str, compiler: &Compiler<'a>) {
-    println!("Building stage{} test artifacts ({} -> {})", compiler.stage,
-             compiler.host, target);
-    let out_dir = build.cargo_out(compiler, Mode::Libtest, target);
-    build.clear_if_dirty(&out_dir, &libstd_shim(build, compiler, target));
-    let mut cargo = build.cargo(compiler, Mode::Libtest, target, "build");
-    cargo.arg("--manifest-path")
-         .arg(build.src.join("src/rustc/test_shim/Cargo.toml"));
-    build.run(&mut cargo);
-    test_link(build, target, compiler, compiler.host);
-}
-
-/// Link all libtest rlibs/dylibs into the sysroot location.
-///
-/// Links those artifacts generated in the given `stage` for `target` produced
-/// by `compiler` into `host`'s sysroot.
-pub fn test_link(build: &Build,
-                 target: &str,
-                 compiler: &Compiler,
-                 host: &str) {
-    let target_compiler = Compiler::new(compiler.stage, host);
-    let libdir = build.sysroot_libdir(&target_compiler, target);
-    let out_dir = build.cargo_out(compiler, Mode::Libtest, target);
-    add_to_sysroot(&out_dir, &libdir);
-}
-
-/// Build the compiler.
-///
-/// This will build the compiler for a particular stage of the build using
-/// the `compiler` targeting the `target` architecture. The artifacts
-/// created will also be linked into the sysroot directory.
-pub fn rustc<'a>(build: &'a Build, target: &str, compiler: &Compiler<'a>) {
-    println!("Building stage{} compiler artifacts ({} -> {})",
-             compiler.stage, compiler.host, target);
-
-    let out_dir = build.cargo_out(compiler, Mode::Librustc, target);
-    build.clear_if_dirty(&out_dir, &libtest_shim(build, compiler, target));
-
-    let mut cargo = build.cargo(compiler, Mode::Librustc, target, "build");
-    cargo.arg("--features").arg(build.rustc_features())
-         .arg("--manifest-path")
-         .arg(build.src.join("src/rustc/Cargo.toml"));
-
-    // Set some configuration variables picked up by build scripts and
-    // the compiler alike
-    cargo.env("CFG_RELEASE", &build.release)
-         .env("CFG_RELEASE_CHANNEL", &build.config.channel)
-         .env("CFG_VERSION", &build.version)
-         .env("CFG_BOOTSTRAP_KEY", &build.bootstrap_key)
-         .env("CFG_PREFIX", build.config.prefix.clone().unwrap_or(String::new()))
-         .env("CFG_LIBDIR_RELATIVE", "lib");
-
-    if let Some(ref ver_date) = build.ver_date {
-        cargo.env("CFG_VER_DATE", ver_date);
-    }
-    if let Some(ref ver_hash) = build.ver_hash {
-        cargo.env("CFG_VER_HASH", ver_hash);
-    }
-    if !build.unstable_features {
-        cargo.env("CFG_DISABLE_UNSTABLE_FEATURES", "1");
-    }
-    cargo.env("LLVM_CONFIG", build.llvm_config(target));
-    if build.config.llvm_static_stdcpp {
-        cargo.env("LLVM_STATIC_STDCPP",
-                  compiler_file(build.cxx(target), "libstdc++.a"));
-    }
-    if let Some(ref s) = build.config.rustc_default_linker {
-        cargo.env("CFG_DEFAULT_LINKER", s);
-    }
-    if let Some(ref s) = build.config.rustc_default_ar {
-        cargo.env("CFG_DEFAULT_AR", s);
-    }
-    build.run(&mut cargo);
-
-    rustc_link(build, target, compiler, compiler.host);
-}
-
-/// Link all librustc rlibs/dylibs into the sysroot location.
-///
-/// Links those artifacts generated in the given `stage` for `target` produced
-/// by `compiler` into `host`'s sysroot.
-pub fn rustc_link(build: &Build,
-                  target: &str,
-                  compiler: &Compiler,
-                  host: &str) {
-    let target_compiler = Compiler::new(compiler.stage, host);
-    let libdir = build.sysroot_libdir(&target_compiler, target);
-    let out_dir = build.cargo_out(compiler, Mode::Librustc, target);
-    add_to_sysroot(&out_dir, &libdir);
-}
-
-/// Cargo's output path for the standard library in a given stage, compiled
-/// by a particular compiler for the specified target.
-fn libstd_shim(build: &Build, compiler: &Compiler, target: &str) -> PathBuf {
-    build.cargo_out(compiler, Mode::Libstd, target).join("libstd_shim.rlib")
-}
-
-/// Cargo's output path for libtest in a given stage, compiled by a particular
-/// compiler for the specified target.
-fn libtest_shim(build: &Build, compiler: &Compiler, target: &str) -> PathBuf {
-    build.cargo_out(compiler, Mode::Libtest, target).join("libtest_shim.rlib")
-}
-
-fn compiler_file(compiler: &Path, file: &str) -> PathBuf {
-    let out = output(Command::new(compiler)
-                            .arg(format!("-print-file-name={}", file)));
-    PathBuf::from(out.trim())
-}
-
-/// Prepare a new compiler from the artifacts in `stage`
-///
-/// This will assemble a compiler in `build/$host/stage$stage`. The compiler
-/// must have been previously produced by the `stage - 1` build.config.build
-/// compiler.
-pub fn assemble_rustc(build: &Build, stage: u32, host: &str) {
-    assert!(stage > 0, "the stage0 compiler isn't assembled, it's downloaded");
-    // The compiler that we're assembling
-    let target_compiler = Compiler::new(stage, host);
-
-    // The compiler that compiled the compiler we're assembling
-    let build_compiler = Compiler::new(stage - 1, &build.config.build);
-
-    // Clear out old files
-    let sysroot = build.sysroot(&target_compiler);
-    let _ = fs::remove_dir_all(&sysroot);
-    t!(fs::create_dir_all(&sysroot));
-
-    // Link in all dylibs to the libdir
-    let sysroot_libdir = sysroot.join(libdir(host));
-    t!(fs::create_dir_all(&sysroot_libdir));
-    let src_libdir = build.sysroot_libdir(&build_compiler, host);
-    for f in t!(fs::read_dir(&src_libdir)).map(|f| t!(f)) {
-        let filename = f.file_name().into_string().unwrap();
-        if is_dylib(&filename) {
-            copy(&f.path(), &sysroot_libdir.join(&filename));
-        }
-    }
-
-    let out_dir = build.cargo_out(&build_compiler, Mode::Librustc, host);
-
-    // Link the compiler binary itself into place
-    let rustc = out_dir.join(exe("rustc", host));
-    let bindir = sysroot.join("bin");
-    t!(fs::create_dir_all(&bindir));
-    let compiler = build.compiler_path(&Compiler::new(stage, host));
-    let _ = fs::remove_file(&compiler);
-    copy(&rustc, &compiler);
-
-    // See if rustdoc exists to link it into place
-    let rustdoc = exe("rustdoc", host);
-    let rustdoc_src = out_dir.join(&rustdoc);
-    let rustdoc_dst = bindir.join(&rustdoc);
-    if fs::metadata(&rustdoc_src).is_ok() {
-        let _ = fs::remove_file(&rustdoc_dst);
-        copy(&rustdoc_src, &rustdoc_dst);
-    }
-}
-
-/// Link some files into a rustc sysroot.
-///
-/// For a particular stage this will link all of the contents of `out_dir`
-/// into the sysroot of the `host` compiler, assuming the artifacts are
-/// compiled for the specified `target`.
-fn add_to_sysroot(out_dir: &Path, sysroot_dst: &Path) {
-    // Collect the set of all files in the dependencies directory, keyed
-    // off the name of the library. We assume everything is of the form
-    // `foo-<hash>.{rlib,so,...}`, and there could be multiple different
-    // `<hash>` values for the same name (of old builds).
-    let mut map = HashMap::new();
-    for file in t!(fs::read_dir(out_dir.join("deps"))).map(|f| t!(f)) {
-        let filename = file.file_name().into_string().unwrap();
-
-        // We're only interested in linking rlibs + dylibs, other things like
-        // unit tests don't get linked in
-        if !filename.ends_with(".rlib") &&
-           !filename.ends_with(".lib") &&
-           !is_dylib(&filename) {
-            continue
-        }
-        let file = file.path();
-        let dash = filename.find("-").unwrap();
-        let key = (filename[..dash].to_string(),
-                   file.extension().unwrap().to_owned());
-        map.entry(key).or_insert(Vec::new())
-           .push(file.clone());
-    }
-
-    // For all hash values found, pick the most recent one to move into the
-    // sysroot, that should be the one we just built.
-    for (_, paths) in map {
-        let (_, path) = paths.iter().map(|path| {
-            (mtime(&path).seconds(), path)
-        }).max().unwrap();
-        copy(&path, &sysroot_dst.join(path.file_name().unwrap()));
-    }
-}
-
-/// Build a tool in `src/tools`
-///
-/// This will build the specified tool with the specified `host` compiler in
-/// `stage` into the normal cargo output directory.
-pub fn tool(build: &Build, stage: u32, host: &str, tool: &str) {
-    println!("Building stage{} tool {} ({})", stage, tool, host);
-
-    let compiler = Compiler::new(stage, host);
-
-    // FIXME: need to clear out previous tool and ideally deps, may require
-    //        isolating output directories or require a pseudo shim step to
-    //        clear out all the info.
-    //
-    //        Maybe when libstd is compiled it should clear out the rustc of the
-    //        corresponding stage?
-    // let out_dir = build.cargo_out(stage, &host, Mode::Librustc, target);
-    // build.clear_if_dirty(&out_dir, &libstd_shim(build, stage, &host, target));
-
-    let mut cargo = build.cargo(&compiler, Mode::Tool, host, "build");
-    cargo.arg("--manifest-path")
-         .arg(build.src.join(format!("src/tools/{}/Cargo.toml", tool)));
-    build.run(&mut cargo);
-}
diff --git a/src/bootstrap/build/config.rs b/src/bootstrap/build/config.rs
deleted file mode 100644 (file)
index 498196e..0000000
+++ /dev/null
@@ -1,396 +0,0 @@
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Serialized configuration of a build.
-//!
-//! This module implements parsing `config.mk` and `config.toml` configuration
-//! files to tweak how the build runs.
-
-use std::collections::HashMap;
-use std::env;
-use std::fs::File;
-use std::io::prelude::*;
-use std::path::PathBuf;
-use std::process;
-
-use num_cpus;
-use rustc_serialize::Decodable;
-use toml::{Parser, Decoder, Value};
-
-/// Global configuration for the entire build and/or bootstrap.
-///
-/// This structure is derived from a combination of both `config.toml` and
-/// `config.mk`. As of the time of this writing it's unlikely that `config.toml`
-/// is used all that much, so this is primarily filled out by `config.mk` which
-/// is generated from `./configure`.
-///
-/// Note that this structure is not decoded directly into, but rather it is
-/// filled out from the decoded forms of the structs below. For documentation
-/// each field, see the corresponding fields in
-/// `src/bootstrap/config.toml.example`.
-#[derive(Default)]
-pub struct Config {
-    pub ccache: bool,
-    pub ninja: bool,
-    pub verbose: bool,
-    pub submodules: bool,
-    pub compiler_docs: bool,
-    pub docs: bool,
-    pub target_config: HashMap<String, Target>,
-
-    // llvm codegen options
-    pub llvm_assertions: bool,
-    pub llvm_optimize: bool,
-    pub llvm_version_check: bool,
-    pub llvm_static_stdcpp: bool,
-
-    // rust codegen options
-    pub rust_optimize: bool,
-    pub rust_codegen_units: u32,
-    pub rust_debug_assertions: bool,
-    pub rust_debuginfo: bool,
-    pub rust_rpath: bool,
-    pub rustc_default_linker: Option<String>,
-    pub rustc_default_ar: Option<String>,
-    pub rust_optimize_tests: bool,
-    pub rust_debuginfo_tests: bool,
-
-    pub build: String,
-    pub host: Vec<String>,
-    pub target: Vec<String>,
-    pub rustc: Option<PathBuf>,
-    pub cargo: Option<PathBuf>,
-    pub local_rebuild: bool,
-
-    // libstd features
-    pub debug_jemalloc: bool,
-    pub use_jemalloc: bool,
-
-    // misc
-    pub channel: String,
-    pub musl_root: Option<PathBuf>,
-    pub prefix: Option<String>,
-}
-
-/// Per-target configuration stored in the global configuration structure.
-#[derive(Default)]
-pub struct Target {
-    pub llvm_config: Option<PathBuf>,
-    pub jemalloc: Option<PathBuf>,
-    pub cc: Option<PathBuf>,
-    pub cxx: Option<PathBuf>,
-    pub ndk: Option<PathBuf>,
-}
-
-/// Structure of the `config.toml` file that configuration is read from.
-///
-/// This structure uses `Decodable` to automatically decode a TOML configuration
-/// file into this format, and then this is traversed and written into the above
-/// `Config` structure.
-#[derive(RustcDecodable, Default)]
-struct TomlConfig {
-    build: Option<Build>,
-    llvm: Option<Llvm>,
-    rust: Option<Rust>,
-    target: Option<HashMap<String, TomlTarget>>,
-}
-
-/// TOML representation of various global build decisions.
-#[derive(RustcDecodable, Default, Clone)]
-struct Build {
-    build: Option<String>,
-    host: Vec<String>,
-    target: Vec<String>,
-    cargo: Option<String>,
-    rustc: Option<String>,
-    compiler_docs: Option<bool>,
-    docs: Option<bool>,
-}
-
-/// TOML representation of how the LLVM build is configured.
-#[derive(RustcDecodable, Default)]
-struct Llvm {
-    ccache: Option<bool>,
-    ninja: Option<bool>,
-    assertions: Option<bool>,
-    optimize: Option<bool>,
-    version_check: Option<bool>,
-    static_libstdcpp: Option<bool>,
-}
-
-/// TOML representation of how the Rust build is configured.
-#[derive(RustcDecodable, Default)]
-struct Rust {
-    optimize: Option<bool>,
-    codegen_units: Option<u32>,
-    debug_assertions: Option<bool>,
-    debuginfo: Option<bool>,
-    debug_jemalloc: Option<bool>,
-    use_jemalloc: Option<bool>,
-    default_linker: Option<String>,
-    default_ar: Option<String>,
-    channel: Option<String>,
-    musl_root: Option<String>,
-    rpath: Option<bool>,
-    optimize_tests: Option<bool>,
-    debuginfo_tests: Option<bool>,
-}
-
-/// TOML representation of how each build target is configured.
-#[derive(RustcDecodable, Default)]
-struct TomlTarget {
-    llvm_config: Option<String>,
-    jemalloc: Option<String>,
-    cc: Option<String>,
-    cxx: Option<String>,
-    android_ndk: Option<String>,
-}
-
-impl Config {
-    pub fn parse(build: &str, file: Option<PathBuf>) -> Config {
-        let mut config = Config::default();
-        config.llvm_optimize = true;
-        config.use_jemalloc = true;
-        config.rust_optimize = true;
-        config.rust_optimize_tests = true;
-        config.submodules = true;
-        config.docs = true;
-        config.rust_rpath = true;
-        config.rust_codegen_units = 1;
-        config.build = build.to_string();
-        config.channel = "dev".to_string();
-
-        let toml = file.map(|file| {
-            let mut f = t!(File::open(&file));
-            let mut toml = String::new();
-            t!(f.read_to_string(&mut toml));
-            let mut p = Parser::new(&toml);
-            let table = match p.parse() {
-                Some(table) => table,
-                None => {
-                    println!("failed to parse TOML configuration:");
-                    for err in p.errors.iter() {
-                        let (loline, locol) = p.to_linecol(err.lo);
-                        let (hiline, hicol) = p.to_linecol(err.hi);
-                        println!("{}:{}-{}:{}: {}", loline, locol, hiline,
-                                 hicol, err.desc);
-                    }
-                    process::exit(2);
-                }
-            };
-            let mut d = Decoder::new(Value::Table(table));
-            match Decodable::decode(&mut d) {
-                Ok(cfg) => cfg,
-                Err(e) => {
-                    println!("failed to decode TOML: {}", e);
-                    process::exit(2);
-                }
-            }
-        }).unwrap_or_else(|| TomlConfig::default());
-
-        let build = toml.build.clone().unwrap_or(Build::default());
-        set(&mut config.build, build.build.clone());
-        config.host.push(config.build.clone());
-        for host in build.host.iter() {
-            if !config.host.contains(host) {
-                config.host.push(host.clone());
-            }
-        }
-        for target in config.host.iter().chain(&build.target) {
-            if !config.target.contains(target) {
-                config.target.push(target.clone());
-            }
-        }
-        config.rustc = build.rustc.map(PathBuf::from);
-        config.cargo = build.cargo.map(PathBuf::from);
-        set(&mut config.compiler_docs, build.compiler_docs);
-        set(&mut config.docs, build.docs);
-
-        if let Some(ref llvm) = toml.llvm {
-            set(&mut config.ccache, llvm.ccache);
-            set(&mut config.ninja, llvm.ninja);
-            set(&mut config.llvm_assertions, llvm.assertions);
-            set(&mut config.llvm_optimize, llvm.optimize);
-            set(&mut config.llvm_version_check, llvm.version_check);
-            set(&mut config.llvm_static_stdcpp, llvm.static_libstdcpp);
-        }
-        if let Some(ref rust) = toml.rust {
-            set(&mut config.rust_debug_assertions, rust.debug_assertions);
-            set(&mut config.rust_debuginfo, rust.debuginfo);
-            set(&mut config.rust_optimize, rust.optimize);
-            set(&mut config.rust_optimize_tests, rust.optimize_tests);
-            set(&mut config.rust_debuginfo_tests, rust.debuginfo_tests);
-            set(&mut config.rust_rpath, rust.rpath);
-            set(&mut config.debug_jemalloc, rust.debug_jemalloc);
-            set(&mut config.use_jemalloc, rust.use_jemalloc);
-            set(&mut config.channel, rust.channel.clone());
-            config.rustc_default_linker = rust.default_linker.clone();
-            config.rustc_default_ar = rust.default_ar.clone();
-            config.musl_root = rust.musl_root.clone().map(PathBuf::from);
-
-            match rust.codegen_units {
-                Some(0) => config.rust_codegen_units = num_cpus::get() as u32,
-                Some(n) => config.rust_codegen_units = n,
-                None => {}
-            }
-        }
-
-        if let Some(ref t) = toml.target {
-            for (triple, cfg) in t {
-                let mut target = Target::default();
-
-                if let Some(ref s) = cfg.llvm_config {
-                    target.llvm_config = Some(env::current_dir().unwrap().join(s));
-                }
-                if let Some(ref s) = cfg.jemalloc {
-                    target.jemalloc = Some(env::current_dir().unwrap().join(s));
-                }
-                if let Some(ref s) = cfg.android_ndk {
-                    target.ndk = Some(env::current_dir().unwrap().join(s));
-                }
-                target.cxx = cfg.cxx.clone().map(PathBuf::from);
-                target.cc = cfg.cc.clone().map(PathBuf::from);
-
-                config.target_config.insert(triple.clone(), target);
-            }
-        }
-
-        return config
-    }
-
-    /// "Temporary" routine to parse `config.mk` into this configuration.
-    ///
-    /// While we still have `./configure` this implements the ability to decode
-    /// that configuration into this. This isn't exactly a full-blown makefile
-    /// parser, but hey it gets the job done!
-    pub fn update_with_config_mk(&mut self) {
-        let mut config = String::new();
-        File::open("config.mk").unwrap().read_to_string(&mut config).unwrap();
-        for line in config.lines() {
-            let mut parts = line.splitn(2, ":=").map(|s| s.trim());
-            let key = parts.next().unwrap();
-            let value = match parts.next() {
-                Some(n) if n.starts_with('\"') => &n[1..n.len() - 1],
-                Some(n) => n,
-                None => continue
-            };
-
-            macro_rules! check {
-                ($(($name:expr, $val:expr),)*) => {
-                    if value == "1" {
-                        $(
-                            if key == concat!("CFG_ENABLE_", $name) {
-                                $val = true;
-                                continue
-                            }
-                            if key == concat!("CFG_DISABLE_", $name) {
-                                $val = false;
-                                continue
-                            }
-                        )*
-                    }
-                }
-            }
-
-            check! {
-                ("CCACHE", self.ccache),
-                ("MANAGE_SUBMODULES", self.submodules),
-                ("COMPILER_DOCS", self.compiler_docs),
-                ("DOCS", self.docs),
-                ("LLVM_ASSERTIONS", self.llvm_assertions),
-                ("OPTIMIZE_LLVM", self.llvm_optimize),
-                ("LLVM_VERSION_CHECK", self.llvm_version_check),
-                ("LLVM_STATIC_STDCPP", self.llvm_static_stdcpp),
-                ("OPTIMIZE", self.rust_optimize),
-                ("DEBUG_ASSERTIONS", self.rust_debug_assertions),
-                ("DEBUGINFO", self.rust_debuginfo),
-                ("JEMALLOC", self.use_jemalloc),
-                ("DEBUG_JEMALLOC", self.debug_jemalloc),
-                ("RPATH", self.rust_rpath),
-                ("OPTIMIZE_TESTS", self.rust_optimize_tests),
-                ("DEBUGINFO_TESTS", self.rust_debuginfo_tests),
-                ("LOCAL_REBUILD", self.local_rebuild),
-            }
-
-            match key {
-                "CFG_BUILD" => self.build = value.to_string(),
-                "CFG_HOST" => {
-                    self.host = value.split(" ").map(|s| s.to_string())
-                                     .collect();
-                }
-                "CFG_TARGET" => {
-                    self.target = value.split(" ").map(|s| s.to_string())
-                                       .collect();
-                }
-                "CFG_MUSL_ROOT" if value.len() > 0 => {
-                    self.musl_root = Some(PathBuf::from(value));
-                }
-                "CFG_DEFAULT_AR" if value.len() > 0 => {
-                    self.rustc_default_ar = Some(value.to_string());
-                }
-                "CFG_DEFAULT_LINKER" if value.len() > 0 => {
-                    self.rustc_default_linker = Some(value.to_string());
-                }
-                "CFG_RELEASE_CHANNEL" => {
-                    self.channel = value.to_string();
-                }
-                "CFG_PREFIX" => {
-                    self.prefix = Some(value.to_string());
-                }
-                "CFG_LLVM_ROOT" if value.len() > 0 => {
-                    let target = self.target_config.entry(self.build.clone())
-                                     .or_insert(Target::default());
-                    let root = PathBuf::from(value);
-                    target.llvm_config = Some(root.join("bin/llvm-config"));
-                }
-                "CFG_JEMALLOC_ROOT" if value.len() > 0 => {
-                    let target = self.target_config.entry(self.build.clone())
-                                     .or_insert(Target::default());
-                    target.jemalloc = Some(PathBuf::from(value));
-                }
-                "CFG_ARM_LINUX_ANDROIDEABI_NDK" if value.len() > 0 => {
-                    let target = "arm-linux-androideabi".to_string();
-                    let target = self.target_config.entry(target)
-                                     .or_insert(Target::default());
-                    target.ndk = Some(PathBuf::from(value));
-                }
-                "CFG_ARMV7_LINUX_ANDROIDEABI_NDK" if value.len() > 0 => {
-                    let target = "armv7-linux-androideabi".to_string();
-                    let target = self.target_config.entry(target)
-                                     .or_insert(Target::default());
-                    target.ndk = Some(PathBuf::from(value));
-                }
-                "CFG_I686_LINUX_ANDROID_NDK" if value.len() > 0 => {
-                    let target = "i686-linux-android".to_string();
-                    let target = self.target_config.entry(target)
-                                     .or_insert(Target::default());
-                    target.ndk = Some(PathBuf::from(value));
-                }
-                "CFG_AARCH64_LINUX_ANDROID_NDK" if value.len() > 0 => {
-                    let target = "aarch64-linux-android".to_string();
-                    let target = self.target_config.entry(target)
-                                     .or_insert(Target::default());
-                    target.ndk = Some(PathBuf::from(value));
-                }
-                "CFG_LOCAL_RUST_ROOT" if value.len() > 0 => {
-                    self.rustc = Some(PathBuf::from(value).join("bin/rustc"));
-                    self.cargo = Some(PathBuf::from(value).join("bin/cargo"));
-                }
-                _ => {}
-            }
-        }
-    }
-}
-
-fn set<T>(field: &mut T, val: Option<T>) {
-    if let Some(v) = val {
-        *field = v;
-    }
-}
diff --git a/src/bootstrap/build/dist.rs b/src/bootstrap/build/dist.rs
deleted file mode 100644 (file)
index 6eed7ea..0000000
+++ /dev/null
@@ -1,319 +0,0 @@
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Implementation of the various distribution aspects of the compiler.
-//!
-//! This module is responsible for creating tarballs of the standard library,
-//! compiler, and documentation. This ends up being what we distribute to
-//! everyone as well.
-//!
-//! No tarball is actually created literally in this file, but rather we shell
-//! out to `rust-installer` still. This may one day be replaced with bits and
-//! pieces of `rustup.rs`!
-
-use std::fs::{self, File};
-use std::io::Write;
-use std::path::{PathBuf, Path};
-use std::process::Command;
-
-use build::{Build, Compiler};
-use build::util::{cp_r, libdir, is_dylib};
-
-fn package_vers(build: &Build) -> &str {
-    match &build.config.channel[..] {
-        "stable" => &build.release,
-        "beta" => "beta",
-        "nightly" => "nightly",
-        _ => &build.release,
-    }
-}
-
-fn distdir(build: &Build) -> PathBuf {
-    build.out.join("dist")
-}
-
-fn tmpdir(build: &Build) -> PathBuf {
-    build.out.join("tmp/dist")
-}
-
-/// Builds the `rust-docs` installer component.
-///
-/// Slurps up documentation from the `stage`'s `host`.
-pub fn docs(build: &Build, stage: u32, host: &str) {
-    println!("Dist docs stage{} ({})", stage, host);
-    let name = format!("rust-docs-{}", package_vers(build));
-    let image = tmpdir(build).join(format!("{}-{}-image", name, name));
-    let _ = fs::remove_dir_all(&image);
-
-    let dst = image.join("share/doc/rust/html");
-    t!(fs::create_dir_all(&dst));
-    let src = build.out.join(host).join("doc");
-    cp_r(&src, &dst);
-
-    let mut cmd = Command::new("sh");
-    cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
-       .arg("--product-name=Rust-Documentation")
-       .arg("--rel-manifest-dir=rustlib")
-       .arg("--success-message=Rust-documentation-is-installed.")
-       .arg(format!("--image-dir={}", sanitize_sh(&image)))
-       .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
-       .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
-       .arg(format!("--package-name={}-{}", name, host))
-       .arg("--component-name=rust-docs")
-       .arg("--legacy-manifest-dirs=rustlib,cargo")
-       .arg("--bulk-dirs=share/doc/rust/html");
-    build.run(&mut cmd);
-    t!(fs::remove_dir_all(&image));
-
-    // As part of this step, *also* copy the docs directory to a directory which
-    // buildbot typically uploads.
-    if host == build.config.build {
-        let dst = distdir(build).join("doc").join(&build.package_vers);
-        t!(fs::create_dir_all(&dst));
-        cp_r(&src, &dst);
-    }
-}
-
-/// Build the `rust-mingw` installer component.
-///
-/// This contains all the bits and pieces to run the MinGW Windows targets
-/// without any extra installed software (e.g. we bundle gcc, libraries, etc).
-/// Currently just shells out to a python script, but that should be rewritten
-/// in Rust.
-pub fn mingw(build: &Build, host: &str) {
-    println!("Dist mingw ({})", host);
-    let name = format!("rust-mingw-{}", package_vers(build));
-    let image = tmpdir(build).join(format!("{}-{}-image", name, host));
-    let _ = fs::remove_dir_all(&image);
-
-    // The first argument to the script is a "temporary directory" which is just
-    // thrown away (this contains the runtime DLLs included in the rustc package
-    // above) and the second argument is where to place all the MinGW components
-    // (which is what we want).
-    //
-    // FIXME: this script should be rewritten into Rust
-    let mut cmd = Command::new("python");
-    cmd.arg(build.src.join("src/etc/make-win-dist.py"))
-       .arg(tmpdir(build))
-       .arg(&image)
-       .arg(host);
-    build.run(&mut cmd);
-
-    let mut cmd = Command::new("sh");
-    cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
-       .arg("--product-name=Rust-MinGW")
-       .arg("--rel-manifest-dir=rustlib")
-       .arg("--success-message=Rust-MinGW-is-installed.")
-       .arg(format!("--image-dir={}", sanitize_sh(&image)))
-       .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
-       .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
-       .arg(format!("--package-name={}-{}", name, host))
-       .arg("--component-name=rust-mingw")
-       .arg("--legacy-manifest-dirs=rustlib,cargo");
-    build.run(&mut cmd);
-    t!(fs::remove_dir_all(&image));
-}
-
-/// Creates the `rustc` installer component.
-pub fn rustc(build: &Build, stage: u32, host: &str) {
-    println!("Dist rustc stage{} ({})", stage, host);
-    let name = format!("rustc-{}", package_vers(build));
-    let image = tmpdir(build).join(format!("{}-{}-image", name, host));
-    let _ = fs::remove_dir_all(&image);
-    let overlay = tmpdir(build).join(format!("{}-{}-overlay", name, host));
-    let _ = fs::remove_dir_all(&overlay);
-
-    // Prepare the rustc "image", what will actually end up getting installed
-    prepare_image(build, stage, host, &image);
-
-    // Prepare the overlay which is part of the tarball but won't actually be
-    // installed
-    let cp = |file: &str| {
-        install(&build.src.join(file), &overlay, 0o644);
-    };
-    cp("COPYRIGHT");
-    cp("LICENSE-APACHE");
-    cp("LICENSE-MIT");
-    cp("README.md");
-    // tiny morsel of metadata is used by rust-packaging
-    let version = &build.version;
-    t!(t!(File::create(overlay.join("version"))).write_all(version.as_bytes()));
-
-    // On MinGW we've got a few runtime DLL dependencies that we need to
-    // include. The first argument to this script is where to put these DLLs
-    // (the image we're creating), and the second argument is a junk directory
-    // to ignore all other MinGW stuff the script creates.
-    //
-    // On 32-bit MinGW we're always including a DLL which needs some extra
-    // licenses to distribute. On 64-bit MinGW we don't actually distribute
-    // anything requiring us to distribute a license, but it's likely the
-    // install will *also* include the rust-mingw package, which also needs
-    // licenses, so to be safe we just include it here in all MinGW packages.
-    //
-    // FIXME: this script should be rewritten into Rust
-    if host.contains("pc-windows-gnu") {
-        let mut cmd = Command::new("python");
-        cmd.arg(build.src.join("src/etc/make-win-dist.py"))
-           .arg(&image)
-           .arg(tmpdir(build))
-           .arg(host);
-        build.run(&mut cmd);
-
-        let dst = image.join("share/doc");
-        t!(fs::create_dir_all(&dst));
-        cp_r(&build.src.join("src/etc/third-party"), &dst);
-    }
-
-    // Finally, wrap everything up in a nice tarball!
-    let mut cmd = Command::new("sh");
-    cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
-       .arg("--product-name=Rust")
-       .arg("--rel-manifest-dir=rustlib")
-       .arg("--success-message=Rust-is-ready-to-roll.")
-       .arg(format!("--image-dir={}", sanitize_sh(&image)))
-       .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
-       .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
-       .arg(format!("--non-installed-overlay={}", sanitize_sh(&overlay)))
-       .arg(format!("--package-name={}-{}", name, host))
-       .arg("--component-name=rustc")
-       .arg("--legacy-manifest-dirs=rustlib,cargo");
-    build.run(&mut cmd);
-    t!(fs::remove_dir_all(&image));
-    t!(fs::remove_dir_all(&overlay));
-
-    fn prepare_image(build: &Build, stage: u32, host: &str, image: &Path) {
-        let src = build.sysroot(&Compiler::new(stage, host));
-        let libdir = libdir(host);
-
-        // Copy rustc/rustdoc binaries
-        t!(fs::create_dir_all(image.join("bin")));
-        cp_r(&src.join("bin"), &image.join("bin"));
-
-        // Copy runtime DLLs needed by the compiler
-        if libdir != "bin" {
-            for entry in t!(src.join(libdir).read_dir()).map(|e| t!(e)) {
-                let name = entry.file_name();
-                if let Some(s) = name.to_str() {
-                    if is_dylib(s) {
-                        install(&entry.path(), &image.join(libdir), 0o644);
-                    }
-                }
-            }
-        }
-
-        // Man pages
-        t!(fs::create_dir_all(image.join("share/man/man1")));
-        cp_r(&build.src.join("man"), &image.join("share/man/man1"));
-
-        // Debugger scripts
-        debugger_scripts(build, &image, host);
-
-        // Misc license info
-        let cp = |file: &str| {
-            install(&build.src.join(file), &image.join("share/doc/rust"), 0o644);
-        };
-        cp("COPYRIGHT");
-        cp("LICENSE-APACHE");
-        cp("LICENSE-MIT");
-        cp("README.md");
-    }
-}
-
-/// Copies debugger scripts for `host` into the `sysroot` specified.
-pub fn debugger_scripts(build: &Build,
-                        sysroot: &Path,
-                        host: &str) {
-    let cp_debugger_script = |file: &str| {
-        let dst = sysroot.join("lib/rustlib/etc");
-        t!(fs::create_dir_all(&dst));
-        install(&build.src.join("src/etc/").join(file), &dst, 0o644);
-    };
-    if host.contains("windows-msvc") {
-        // no debugger scripts
-    } else {
-        cp_debugger_script("debugger_pretty_printers_common.py");
-
-        // gdb debugger scripts
-        install(&build.src.join("src/etc/rust-gdb"), &sysroot.join("bin"),
-                0o755);
-
-        cp_debugger_script("gdb_load_rust_pretty_printers.py");
-        cp_debugger_script("gdb_rust_pretty_printing.py");
-
-        // lldb debugger scripts
-        install(&build.src.join("src/etc/rust-lldb"), &sysroot.join("bin"),
-                0o755);
-
-        cp_debugger_script("lldb_rust_formatters.py");
-    }
-}
-
-/// Creates the `rust-std` installer component as compiled by `compiler` for the
-/// target `target`.
-pub fn std(build: &Build, compiler: &Compiler, target: &str) {
-    println!("Dist std stage{} ({} -> {})", compiler.stage, compiler.host,
-             target);
-    let name = format!("rust-std-{}", package_vers(build));
-    let image = tmpdir(build).join(format!("{}-{}-image", name, target));
-    let _ = fs::remove_dir_all(&image);
-
-    let dst = image.join("lib/rustlib").join(target);
-    t!(fs::create_dir_all(&dst));
-    let src = build.sysroot(compiler).join("lib/rustlib");
-    cp_r(&src.join(target), &dst);
-
-    let mut cmd = Command::new("sh");
-    cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
-       .arg("--product-name=Rust")
-       .arg("--rel-manifest-dir=rustlib")
-       .arg("--success-message=std-is-standing-at-the-ready.")
-       .arg(format!("--image-dir={}", sanitize_sh(&image)))
-       .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
-       .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
-       .arg(format!("--package-name={}-{}", name, target))
-       .arg(format!("--component-name=rust-std-{}", target))
-       .arg("--legacy-manifest-dirs=rustlib,cargo");
-    build.run(&mut cmd);
-    t!(fs::remove_dir_all(&image));
-}
-
-fn install(src: &Path, dstdir: &Path, perms: u32) {
-    let dst = dstdir.join(src.file_name().unwrap());
-    t!(fs::create_dir_all(dstdir));
-    t!(fs::copy(src, &dst));
-    chmod(&dst, perms);
-}
-
-#[cfg(unix)]
-fn chmod(path: &Path, perms: u32) {
-    use std::os::unix::fs::*;
-    t!(fs::set_permissions(path, fs::Permissions::from_mode(perms)));
-}
-#[cfg(windows)]
-fn chmod(_path: &Path, _perms: u32) {}
-
-// We have to run a few shell scripts, which choke quite a bit on both `\`
-// characters and on `C:\` paths, so normalize both of them away.
-fn sanitize_sh(path: &Path) -> String {
-    let path = path.to_str().unwrap().replace("\\", "/");
-    return change_drive(&path).unwrap_or(path);
-
-    fn change_drive(s: &str) -> Option<String> {
-        let mut ch = s.chars();
-        let drive = ch.next().unwrap_or('C');
-        if ch.next() != Some(':') {
-            return None
-        }
-        if ch.next() != Some('/') {
-            return None
-        }
-        Some(format!("/{}/{}", drive, &s[drive.len_utf8() + 2..]))
-    }
-}
diff --git a/src/bootstrap/build/doc.rs b/src/bootstrap/build/doc.rs
deleted file mode 100644 (file)
index f7cc742..0000000
+++ /dev/null
@@ -1,207 +0,0 @@
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Documentation generation for rustbuild.
-//!
-//! This module implements generation for all bits and pieces of documentation
-//! for the Rust project. This notably includes suites like the rust book, the
-//! nomicon, standalone documentation, etc.
-//!
-//! Everything here is basically just a shim around calling either `rustbook` or
-//! `rustdoc`.
-
-use std::fs::{self, File};
-use std::io::prelude::*;
-use std::path::Path;
-use std::process::Command;
-
-use build::{Build, Compiler, Mode};
-use build::util::{up_to_date, cp_r};
-
-/// Invoke `rustbook` as compiled in `stage` for `target` for the doc book
-/// `name` into the `out` path.
-///
-/// This will not actually generate any documentation if the documentation has
-/// already been generated.
-pub fn rustbook(build: &Build, stage: u32, target: &str, name: &str, out: &Path) {
-    t!(fs::create_dir_all(out));
-
-    let out = out.join(name);
-    let compiler = Compiler::new(stage, &build.config.build);
-    let src = build.src.join("src/doc").join(name);
-    let index = out.join("index.html");
-    let rustbook = build.tool(&compiler, "rustbook");
-    if up_to_date(&src, &index) && up_to_date(&rustbook, &index) {
-        return
-    }
-    println!("Rustbook stage{} ({}) - {}", stage, target, name);
-    let _ = fs::remove_dir_all(&out);
-    build.run(build.tool_cmd(&compiler, "rustbook")
-                   .arg("build")
-                   .arg(&src)
-                   .arg(out));
-}
-
-/// Generates all standalone documentation as compiled by the rustdoc in `stage`
-/// for the `target` into `out`.
-///
-/// This will list all of `src/doc` looking for markdown files and appropriately
-/// perform transformations like substituting `VERSION`, `SHORT_HASH`, and
-/// `STAMP` alongw ith providing the various header/footer HTML we've cutomized.
-///
-/// In the end, this is just a glorified wrapper around rustdoc!
-pub fn standalone(build: &Build, stage: u32, target: &str, out: &Path) {
-    println!("Documenting stage{} standalone ({})", stage, target);
-    t!(fs::create_dir_all(out));
-
-    let compiler = Compiler::new(stage, &build.config.build);
-
-    let favicon = build.src.join("src/doc/favicon.inc");
-    let footer = build.src.join("src/doc/footer.inc");
-    let full_toc = build.src.join("src/doc/full-toc.inc");
-    t!(fs::copy(build.src.join("src/doc/rust.css"), out.join("rust.css")));
-
-    let version_input = build.src.join("src/doc/version_info.html.template");
-    let version_info = out.join("version_info.html");
-
-    if !up_to_date(&version_input, &version_info) {
-        let mut info = String::new();
-        t!(t!(File::open(&version_input)).read_to_string(&mut info));
-        let blank = String::new();
-        let short = build.short_ver_hash.as_ref().unwrap_or(&blank);
-        let hash = build.ver_hash.as_ref().unwrap_or(&blank);
-        let info = info.replace("VERSION", &build.release)
-                       .replace("SHORT_HASH", short)
-                       .replace("STAMP", hash);
-        t!(t!(File::create(&version_info)).write_all(info.as_bytes()));
-    }
-
-    for file in t!(fs::read_dir(build.src.join("src/doc"))) {
-        let file = t!(file);
-        let path = file.path();
-        let filename = path.file_name().unwrap().to_str().unwrap();
-        if !filename.ends_with(".md") || filename == "README.md" {
-            continue
-        }
-
-        let html = out.join(filename).with_extension("html");
-        let rustdoc = build.rustdoc(&compiler);
-        if up_to_date(&path, &html) &&
-           up_to_date(&footer, &html) &&
-           up_to_date(&favicon, &html) &&
-           up_to_date(&full_toc, &html) &&
-           up_to_date(&version_info, &html) &&
-           up_to_date(&rustdoc, &html) {
-            continue
-        }
-
-        let mut cmd = Command::new(&rustdoc);
-        build.add_rustc_lib_path(&compiler, &mut cmd);
-        cmd.arg("--html-after-content").arg(&footer)
-           .arg("--html-before-content").arg(&version_info)
-           .arg("--html-in-header").arg(&favicon)
-           .arg("--markdown-playground-url")
-           .arg("https://play.rust-lang.org/")
-           .arg("-o").arg(out)
-           .arg(&path);
-
-        if filename == "reference.md" {
-           cmd.arg("--html-in-header").arg(&full_toc);
-        }
-
-        if filename == "not_found.md" {
-            cmd.arg("--markdown-no-toc")
-               .arg("--markdown-css")
-               .arg("https://doc.rust-lang.org/rust.css");
-        } else {
-            cmd.arg("--markdown-css").arg("rust.css");
-        }
-        build.run(&mut cmd);
-    }
-}
-
-/// Compile all standard library documentation.
-///
-/// This will generate all documentation for the standard library and its
-/// dependencies. This is largely just a wrapper around `cargo doc`.
-pub fn std(build: &Build, stage: u32, target: &str, out: &Path) {
-    println!("Documenting stage{} std ({})", stage, target);
-    t!(fs::create_dir_all(out));
-    let compiler = Compiler::new(stage, &build.config.build);
-    let out_dir = build.stage_out(&compiler, Mode::Libstd)
-                       .join(target).join("doc");
-    let rustdoc = build.rustdoc(&compiler);
-
-    build.clear_if_dirty(&out_dir, &rustdoc);
-
-    let mut cargo = build.cargo(&compiler, Mode::Libstd, target, "doc");
-    cargo.arg("--manifest-path")
-         .arg(build.src.join("src/rustc/std_shim/Cargo.toml"))
-         .arg("--features").arg(build.std_features());
-    build.run(&mut cargo);
-    cp_r(&out_dir, out)
-}
-
-/// Compile all libtest documentation.
-///
-/// This will generate all documentation for libtest and its dependencies. This
-/// is largely just a wrapper around `cargo doc`.
-pub fn test(build: &Build, stage: u32, target: &str, out: &Path) {
-    println!("Documenting stage{} test ({})", stage, target);
-    let compiler = Compiler::new(stage, &build.config.build);
-    let out_dir = build.stage_out(&compiler, Mode::Libtest)
-                       .join(target).join("doc");
-    let rustdoc = build.rustdoc(&compiler);
-
-    build.clear_if_dirty(&out_dir, &rustdoc);
-
-    let mut cargo = build.cargo(&compiler, Mode::Libtest, target, "doc");
-    cargo.arg("--manifest-path")
-         .arg(build.src.join("src/rustc/test_shim/Cargo.toml"));
-    build.run(&mut cargo);
-    cp_r(&out_dir, out)
-}
-
-/// Generate all compiler documentation.
-///
-/// This will generate all documentation for the compiler libraries and their
-/// dependencies. This is largely just a wrapper around `cargo doc`.
-pub fn rustc(build: &Build, stage: u32, target: &str, out: &Path) {
-    println!("Documenting stage{} compiler ({})", stage, target);
-    let compiler = Compiler::new(stage, &build.config.build);
-    let out_dir = build.stage_out(&compiler, Mode::Librustc)
-                       .join(target).join("doc");
-    let rustdoc = build.rustdoc(&compiler);
-    if !up_to_date(&rustdoc, &out_dir.join("rustc/index.html")) {
-        t!(fs::remove_dir_all(&out_dir));
-    }
-    let mut cargo = build.cargo(&compiler, Mode::Librustc, target, "doc");
-    cargo.arg("--manifest-path")
-         .arg(build.src.join("src/rustc/Cargo.toml"))
-         .arg("--features").arg(build.rustc_features());
-    build.run(&mut cargo);
-    cp_r(&out_dir, out)
-}
-
-/// Generates the HTML rendered error-index by running the
-/// `error_index_generator` tool.
-pub fn error_index(build: &Build, stage: u32, target: &str, out: &Path) {
-    println!("Documenting stage{} error index ({})", stage, target);
-    t!(fs::create_dir_all(out));
-    let compiler = Compiler::new(stage, &build.config.build);
-    let mut index = build.tool_cmd(&compiler, "error_index_generator");
-    index.arg("html");
-    index.arg(out.join("error-index.html"));
-
-    // FIXME: shouldn't have to pass this env var
-    index.env("CFG_BUILD", &build.config.build);
-
-    build.run(&mut index);
-}
diff --git a/src/bootstrap/build/flags.rs b/src/bootstrap/build/flags.rs
deleted file mode 100644 (file)
index d925997..0000000
+++ /dev/null
@@ -1,103 +0,0 @@
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Command-line interface of the rustbuild build system.
-//!
-//! This module implements the command-line parsing of the build system which
-//! has various flags to configure how it's run.
-
-use std::fs;
-use std::path::PathBuf;
-use std::process;
-use std::slice;
-
-use getopts::Options;
-
-/// Deserialized version of all flags for this compile.
-pub struct Flags {
-    pub verbose: bool,
-    pub stage: Option<u32>,
-    pub build: String,
-    pub host: Filter,
-    pub target: Filter,
-    pub step: Vec<String>,
-    pub config: Option<PathBuf>,
-    pub src: Option<PathBuf>,
-    pub jobs: Option<u32>,
-    pub args: Vec<String>,
-    pub clean: bool,
-}
-
-pub struct Filter {
-    values: Vec<String>,
-}
-
-impl Flags {
-    pub fn parse(args: &[String]) -> Flags {
-        let mut opts = Options::new();
-        opts.optflag("v", "verbose", "use verbose output");
-        opts.optopt("", "config", "TOML configuration file for build", "FILE");
-        opts.optmulti("", "host", "host targets to build", "HOST");
-        opts.reqopt("", "build", "build target of the stage0 compiler", "BUILD");
-        opts.optmulti("", "target", "targets to build", "TARGET");
-        opts.optmulti("s", "step", "build step to execute", "STEP");
-        opts.optopt("", "stage", "stage to build", "N");
-        opts.optopt("", "src", "path to repo root", "DIR");
-        opts.optopt("j", "jobs", "number of jobs to run in parallel", "JOBS");
-        opts.optflag("", "clean", "clean output directory");
-        opts.optflag("h", "help", "print this help message");
-
-        let usage = |n| -> ! {
-            let brief = format!("Usage: rust.py [options]");
-            print!("{}", opts.usage(&brief));
-            process::exit(n);
-        };
-
-        let m = opts.parse(args).unwrap_or_else(|e| {
-            println!("failed to parse options: {}", e);
-            usage(1);
-        });
-        if m.opt_present("h") {
-            usage(0);
-        }
-
-        let cfg_file = m.opt_str("config").map(PathBuf::from).or_else(|| {
-            if fs::metadata("config.toml").is_ok() {
-                Some(PathBuf::from("config.toml"))
-            } else {
-                None
-            }
-        });
-
-        Flags {
-            verbose: m.opt_present("v"),
-            clean: m.opt_present("clean"),
-            stage: m.opt_str("stage").map(|j| j.parse().unwrap()),
-            build: m.opt_str("build").unwrap(),
-            host: Filter { values: m.opt_strs("host") },
-            target: Filter { values: m.opt_strs("target") },
-            step: m.opt_strs("step"),
-            config: cfg_file,
-            src: m.opt_str("src").map(PathBuf::from),
-            jobs: m.opt_str("jobs").map(|j| j.parse().unwrap()),
-            args: m.free.clone(),
-        }
-    }
-}
-
-impl Filter {
-    pub fn contains(&self, name: &str) -> bool {
-        self.values.len() == 0 || self.values.iter().any(|s| s == name)
-    }
-
-    pub fn iter(&self) -> slice::Iter<String> {
-        self.values.iter()
-    }
-}
diff --git a/src/bootstrap/build/job.rs b/src/bootstrap/build/job.rs
deleted file mode 100644 (file)
index 4558e6f..0000000
+++ /dev/null
@@ -1,111 +0,0 @@
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Job management on Windows for bootstrapping
-//!
-//! Most of the time when you're running a build system (e.g. make) you expect
-//! Ctrl-C or abnormal termination to actually terminate the entire tree of
-//! process in play, not just the one at the top. This currently works "by
-//! default" on Unix platforms because Ctrl-C actually sends a signal to the
-//! *process group* rather than the parent process, so everything will get torn
-//! down. On Windows, however, this does not happen and Ctrl-C just kills the
-//! parent process.
-//!
-//! To achieve the same semantics on Windows we use Job Objects to ensure that
-//! all processes die at the same time. Job objects have a mode of operation
-//! where when all handles to the object are closed it causes all child
-//! processes associated with the object to be terminated immediately.
-//! Conveniently whenever a process in the job object spawns a new process the
-//! child will be associated with the job object as well. This means if we add
-//! ourselves to the job object we create then everything will get torn down!
-//!
-//! Unfortunately most of the time the build system is actually called from a
-//! python wrapper (which manages things like building the build system) so this
-//! all doesn't quite cut it so far. To go the last mile we duplicate the job
-//! object handle into our parent process (a python process probably) and then
-//! close our own handle. This means that the only handle to the job object
-//! resides in the parent python process, so when python dies the whole build
-//! system dies (as one would probably expect!).
-//!
-//! Note that this module has a #[cfg(windows)] above it as none of this logic
-//! is required on Unix.
-
-extern crate kernel32;
-extern crate winapi;
-
-use std::env;
-use std::io;
-use std::mem;
-
-use self::winapi::*;
-use self::kernel32::*;
-
-pub unsafe fn setup() {
-    // Create a new job object for us to use
-    let job = CreateJobObjectW(0 as *mut _, 0 as *const _);
-    assert!(job != 0 as *mut _, "{}", io::Error::last_os_error());
-
-    // Indicate that when all handles to the job object are gone that all
-    // process in the object should be killed. Note that this includes our
-    // entire process tree by default because we've added ourselves and our
-    // children will reside in the job by default.
-    let mut info = mem::zeroed::<JOBOBJECT_EXTENDED_LIMIT_INFORMATION>();
-    info.BasicLimitInformation.LimitFlags = JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE;
-    let r = SetInformationJobObject(job,
-                                    JobObjectExtendedLimitInformation,
-                                    &mut info as *mut _ as LPVOID,
-                                    mem::size_of_val(&info) as DWORD);
-    assert!(r != 0, "{}", io::Error::last_os_error());
-
-    // Assign our process to this job object. Note that if this fails, one very
-    // likely reason is that we are ourselves already in a job object! This can
-    // happen on the build bots that we've got for Windows, or if just anyone
-    // else is instrumenting the build. In this case we just bail out
-    // immediately and assume that they take care of it.
-    //
-    // Also note that nested jobs (why this might fail) are supported in recent
-    // versions of Windows, but the version of Windows that our bots are running
-    // at least don't support nested job objects.
-    let r = AssignProcessToJobObject(job, GetCurrentProcess());
-    if r == 0 {
-        CloseHandle(job);
-        return
-    }
-
-    // If we've got a parent process (e.g. the python script that called us)
-    // then move ownership of this job object up to them. That way if the python
-    // script is killed (e.g. via ctrl-c) then we'll all be torn down.
-    //
-    // If we don't have a parent (e.g. this was run directly) then we
-    // intentionally leak the job object handle. When our process exits
-    // (normally or abnormally) it will close the handle implicitly, causing all
-    // processes in the job to be cleaned up.
-    let pid = match env::var("BOOTSTRAP_PARENT_ID") {
-        Ok(s) => s,
-        Err(..) => return,
-    };
-
-    let parent = OpenProcess(PROCESS_DUP_HANDLE, FALSE, pid.parse().unwrap());
-    assert!(parent != 0 as *mut _, "{}", io::Error::last_os_error());
-    let mut parent_handle = 0 as *mut _;
-    let r = DuplicateHandle(GetCurrentProcess(), job,
-                            parent, &mut parent_handle,
-                            0, FALSE, DUPLICATE_SAME_ACCESS);
-
-    // If this failed, well at least we tried! An example of DuplicateHandle
-    // failing in the past has been when the wrong python2 package spawed this
-    // build system (e.g. the `python2` package in MSYS instead of
-    // `mingw-w64-x86_64-python2`. Not sure why it failed, but the "failure
-    // mode" here is that we only clean everything up when the build system
-    // dies, not when the python parent does, so not too bad.
-    if r != 0 {
-        CloseHandle(job);
-    }
-}
diff --git a/src/bootstrap/build/mod.rs b/src/bootstrap/build/mod.rs
deleted file mode 100644 (file)
index 195d1bc..0000000
+++ /dev/null
@@ -1,871 +0,0 @@
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Implementation of rustbuild, the Rust build system.
-//!
-//! This module, and its descendants, are the implementation of the Rust build
-//! system. Most of this build system is backed by Cargo but the outer layer
-//! here serves as the ability to orchestrate calling Cargo, sequencing Cargo
-//! builds, building artifacts like LLVM, etc.
-//!
-//! More documentation can be found in each respective module below.
-
-use std::cell::RefCell;
-use std::collections::HashMap;
-use std::env;
-use std::fs::{self, File};
-use std::path::{PathBuf, Path};
-use std::process::Command;
-
-use build_helper::{run_silent, output};
-use gcc;
-use num_cpus;
-
-use build::util::{exe, mtime, libdir, add_lib_path};
-
-/// A helper macro to `unwrap` a result except also print out details like:
-///
-/// * The file/line of the panic
-/// * The expression that failed
-/// * The error itself
-///
-/// This is currently used judiciously throughout the build system rather than
-/// using a `Result` with `try!`, but this may change on day...
-macro_rules! t {
-    ($e:expr) => (match $e {
-        Ok(e) => e,
-        Err(e) => panic!("{} failed with {}", stringify!($e), e),
-    })
-}
-
-mod cc;
-mod channel;
-mod check;
-mod clean;
-mod compile;
-mod config;
-mod dist;
-mod doc;
-mod flags;
-mod native;
-mod sanity;
-mod step;
-mod util;
-
-#[cfg(windows)]
-mod job;
-
-#[cfg(not(windows))]
-mod job {
-    pub unsafe fn setup() {}
-}
-
-pub use build::config::Config;
-pub use build::flags::Flags;
-
-/// A structure representing a Rust compiler.
-///
-/// Each compiler has a `stage` that it is associated with and a `host` that
-/// corresponds to the platform the compiler runs on. This structure is used as
-/// a parameter to many methods below.
-#[derive(Eq, PartialEq, Clone, Copy, Hash, Debug)]
-pub struct Compiler<'a> {
-    stage: u32,
-    host: &'a str,
-}
-
-/// Global configuration for the build system.
-///
-/// This structure transitively contains all configuration for the build system.
-/// All filesystem-encoded configuration is in `config`, all flags are in
-/// `flags`, and then parsed or probed information is listed in the keys below.
-///
-/// This structure is a parameter of almost all methods in the build system,
-/// although most functions are implemented as free functions rather than
-/// methods specifically on this structure itself (to make it easier to
-/// organize).
-pub struct Build {
-    // User-specified configuration via config.toml
-    config: Config,
-
-    // User-specified configuration via CLI flags
-    flags: Flags,
-
-    // Derived properties from the above two configurations
-    cargo: PathBuf,
-    rustc: PathBuf,
-    src: PathBuf,
-    out: PathBuf,
-    release: String,
-    unstable_features: bool,
-    ver_hash: Option<String>,
-    short_ver_hash: Option<String>,
-    ver_date: Option<String>,
-    version: String,
-    package_vers: String,
-    bootstrap_key: String,
-    bootstrap_key_stage0: String,
-
-    // Probed tools at runtime
-    gdb_version: Option<String>,
-    lldb_version: Option<String>,
-    lldb_python_dir: Option<String>,
-
-    // Runtime state filled in later on
-    cc: HashMap<String, (gcc::Tool, Option<PathBuf>)>,
-    cxx: HashMap<String, gcc::Tool>,
-    compiler_rt_built: RefCell<HashMap<String, PathBuf>>,
-}
-
-/// The various "modes" of invoking Cargo.
-///
-/// These entries currently correspond to the various output directories of the
-/// build system, with each mod generating output in a different directory.
-#[derive(Clone, Copy)]
-pub enum Mode {
-    /// This cargo is going to build the standard library, placing output in the
-    /// "stageN-std" directory.
-    Libstd,
-
-    /// This cargo is going to build libtest, placing output in the
-    /// "stageN-test" directory.
-    Libtest,
-
-    /// This cargo is going to build librustc and compiler libraries, placing
-    /// output in the "stageN-rustc" directory.
-    Librustc,
-
-    /// This cargo is going to some build tool, placing output in the
-    /// "stageN-tools" directory.
-    Tool,
-}
-
-impl Build {
-    /// Creates a new set of build configuration from the `flags` on the command
-    /// line and the filesystem `config`.
-    ///
-    /// By default all build output will be placed in the current directory.
-    pub fn new(flags: Flags, config: Config) -> Build {
-        let cwd = t!(env::current_dir());
-        let src = flags.src.clone().unwrap_or(cwd.clone());
-        let out = cwd.join("build");
-
-        let stage0_root = out.join(&config.build).join("stage0/bin");
-        let rustc = match config.rustc {
-            Some(ref s) => PathBuf::from(s),
-            None => stage0_root.join(exe("rustc", &config.build)),
-        };
-        let cargo = match config.cargo {
-            Some(ref s) => PathBuf::from(s),
-            None => stage0_root.join(exe("cargo", &config.build)),
-        };
-
-        Build {
-            flags: flags,
-            config: config,
-            cargo: cargo,
-            rustc: rustc,
-            src: src,
-            out: out,
-
-            release: String::new(),
-            unstable_features: false,
-            ver_hash: None,
-            short_ver_hash: None,
-            ver_date: None,
-            version: String::new(),
-            bootstrap_key: String::new(),
-            bootstrap_key_stage0: String::new(),
-            package_vers: String::new(),
-            cc: HashMap::new(),
-            cxx: HashMap::new(),
-            compiler_rt_built: RefCell::new(HashMap::new()),
-            gdb_version: None,
-            lldb_version: None,
-            lldb_python_dir: None,
-        }
-    }
-
-    /// Executes the entire build, as configured by the flags and configuration.
-    pub fn build(&mut self) {
-        use build::step::Source::*;
-
-        unsafe {
-            job::setup();
-        }
-
-        if self.flags.clean {
-            return clean::clean(self);
-        }
-
-        self.verbose("finding compilers");
-        cc::find(self);
-        self.verbose("running sanity check");
-        sanity::check(self);
-        self.verbose("collecting channel variables");
-        channel::collect(self);
-        self.verbose("updating submodules");
-        self.update_submodules();
-
-        // The main loop of the build system.
-        //
-        // The `step::all` function returns a topographically sorted list of all
-        // steps that need to be executed as part of this build. Each step has a
-        // corresponding entry in `step.rs` and indicates some unit of work that
-        // needs to be done as part of the build.
-        //
-        // Almost all of these are simple one-liners that shell out to the
-        // corresponding functionality in the extra modules, where more
-        // documentation can be found.
-        for target in step::all(self) {
-            let doc_out = self.out.join(&target.target).join("doc");
-            match target.src {
-                Llvm { _dummy } => {
-                    native::llvm(self, target.target);
-                }
-                CompilerRt { _dummy } => {
-                    native::compiler_rt(self, target.target);
-                }
-                TestHelpers { _dummy } => {
-                    native::test_helpers(self, target.target);
-                }
-                Libstd { compiler } => {
-                    compile::std(self, target.target, &compiler);
-                }
-                Libtest { compiler } => {
-                    compile::test(self, target.target, &compiler);
-                }
-                Librustc { compiler } => {
-                    compile::rustc(self, target.target, &compiler);
-                }
-                LibstdLink { compiler, host } => {
-                    compile::std_link(self, target.target, &compiler, host);
-                }
-                LibtestLink { compiler, host } => {
-                    compile::test_link(self, target.target, &compiler, host);
-                }
-                LibrustcLink { compiler, host } => {
-                    compile::rustc_link(self, target.target, &compiler, host);
-                }
-                Rustc { stage: 0 } => {
-                    // nothing to do...
-                }
-                Rustc { stage } => {
-                    compile::assemble_rustc(self, stage, target.target);
-                }
-                ToolLinkchecker { stage } => {
-                    compile::tool(self, stage, target.target, "linkchecker");
-                }
-                ToolRustbook { stage } => {
-                    compile::tool(self, stage, target.target, "rustbook");
-                }
-                ToolErrorIndex { stage } => {
-                    compile::tool(self, stage, target.target,
-                                  "error_index_generator");
-                }
-                ToolCargoTest { stage } => {
-                    compile::tool(self, stage, target.target, "cargotest");
-                }
-                ToolTidy { stage } => {
-                    compile::tool(self, stage, target.target, "tidy");
-                }
-                ToolCompiletest { stage } => {
-                    compile::tool(self, stage, target.target, "compiletest");
-                }
-                DocBook { stage } => {
-                    doc::rustbook(self, stage, target.target, "book", &doc_out);
-                }
-                DocNomicon { stage } => {
-                    doc::rustbook(self, stage, target.target, "nomicon",
-                                  &doc_out);
-                }
-                DocStyle { stage } => {
-                    doc::rustbook(self, stage, target.target, "style",
-                                  &doc_out);
-                }
-                DocStandalone { stage } => {
-                    doc::standalone(self, stage, target.target, &doc_out);
-                }
-                DocStd { stage } => {
-                    doc::std(self, stage, target.target, &doc_out);
-                }
-                DocTest { stage } => {
-                    doc::test(self, stage, target.target, &doc_out);
-                }
-                DocRustc { stage } => {
-                    doc::rustc(self, stage, target.target, &doc_out);
-                }
-                DocErrorIndex { stage } => {
-                    doc::error_index(self, stage, target.target, &doc_out);
-                }
-
-                CheckLinkcheck { stage } => {
-                    check::linkcheck(self, stage, target.target);
-                }
-                CheckCargoTest { stage } => {
-                    check::cargotest(self, stage, target.target);
-                }
-                CheckTidy { stage } => {
-                    check::tidy(self, stage, target.target);
-                }
-                CheckRPass { compiler } => {
-                    check::compiletest(self, &compiler, target.target,
-                                       "run-pass", "run-pass");
-                }
-                CheckRPassFull { compiler } => {
-                    check::compiletest(self, &compiler, target.target,
-                                       "run-pass", "run-pass-fulldeps");
-                }
-                CheckCFail { compiler } => {
-                    check::compiletest(self, &compiler, target.target,
-                                       "compile-fail", "compile-fail");
-                }
-                CheckCFailFull { compiler } => {
-                    check::compiletest(self, &compiler, target.target,
-                                       "compile-fail", "compile-fail-fulldeps")
-                }
-                CheckPFail { compiler } => {
-                    check::compiletest(self, &compiler, target.target,
-                                       "parse-fail", "parse-fail");
-                }
-                CheckRFail { compiler } => {
-                    check::compiletest(self, &compiler, target.target,
-                                       "run-fail", "run-fail");
-                }
-                CheckRFailFull { compiler } => {
-                    check::compiletest(self, &compiler, target.target,
-                                       "run-fail", "run-fail-fulldeps");
-                }
-                CheckPretty { compiler } => {
-                    check::compiletest(self, &compiler, target.target,
-                                       "pretty", "pretty");
-                }
-                CheckPrettyRPass { compiler } => {
-                    check::compiletest(self, &compiler, target.target,
-                                       "pretty", "run-pass");
-                }
-                CheckPrettyRPassFull { compiler } => {
-                    check::compiletest(self, &compiler, target.target,
-                                       "pretty", "run-pass-fulldeps");
-                }
-                CheckPrettyRFail { compiler } => {
-                    check::compiletest(self, &compiler, target.target,
-                                       "pretty", "run-fail");
-                }
-                CheckPrettyRFailFull { compiler } => {
-                    check::compiletest(self, &compiler, target.target,
-                                       "pretty", "run-fail-fulldeps");
-                }
-                CheckPrettyRPassValgrind { compiler } => {
-                    check::compiletest(self, &compiler, target.target,
-                                       "pretty", "run-pass-valgrind");
-                }
-                CheckCodegen { compiler } => {
-                    check::compiletest(self, &compiler, target.target,
-                                       "codegen", "codegen");
-                }
-                CheckCodegenUnits { compiler } => {
-                    check::compiletest(self, &compiler, target.target,
-                                       "codegen-units", "codegen-units");
-                }
-                CheckIncremental { compiler } => {
-                    check::compiletest(self, &compiler, target.target,
-                                       "incremental", "incremental");
-                }
-                CheckUi { compiler } => {
-                    check::compiletest(self, &compiler, target.target,
-                                       "ui", "ui");
-                }
-                CheckDebuginfo { compiler } => {
-                    if target.target.contains("msvc") {
-                        // nothing to do
-                    } else if target.target.contains("apple") {
-                        check::compiletest(self, &compiler, target.target,
-                                           "debuginfo-lldb", "debuginfo");
-                    } else {
-                        check::compiletest(self, &compiler, target.target,
-                                           "debuginfo-gdb", "debuginfo");
-                    }
-                }
-                CheckRustdoc { compiler } => {
-                    check::compiletest(self, &compiler, target.target,
-                                       "rustdoc", "rustdoc");
-                }
-                CheckRPassValgrind { compiler } => {
-                    check::compiletest(self, &compiler, target.target,
-                                       "run-pass-valgrind", "run-pass-valgrind");
-                }
-                CheckDocs { compiler } => {
-                    check::docs(self, &compiler);
-                }
-                CheckErrorIndex { compiler } => {
-                    check::error_index(self, &compiler);
-                }
-                CheckRMake { compiler } => {
-                    check::compiletest(self, &compiler, target.target,
-                                       "run-make", "run-make")
-                }
-                CheckCrateStd { compiler } => {
-                    check::krate(self, &compiler, target.target, Mode::Libstd)
-                }
-                CheckCrateTest { compiler } => {
-                    check::krate(self, &compiler, target.target, Mode::Libtest)
-                }
-                CheckCrateRustc { compiler } => {
-                    check::krate(self, &compiler, target.target, Mode::Librustc)
-                }
-
-                DistDocs { stage } => dist::docs(self, stage, target.target),
-                DistMingw { _dummy } => dist::mingw(self, target.target),
-                DistRustc { stage } => dist::rustc(self, stage, target.target),
-                DistStd { compiler } => dist::std(self, &compiler, target.target),
-
-                DebuggerScripts { stage } => {
-                    let compiler = Compiler::new(stage, target.target);
-                    dist::debugger_scripts(self,
-                                           &self.sysroot(&compiler),
-                                           target.target);
-                }
-
-                AndroidCopyLibs { compiler } => {
-                    check::android_copy_libs(self, &compiler, target.target);
-                }
-
-                // pseudo-steps
-                Dist { .. } |
-                Doc { .. } |
-                CheckTarget { .. } |
-                Check { .. } => {}
-            }
-        }
-    }
-
-    /// Updates all git submodules that we have.
-    ///
-    /// This will detect if any submodules are out of date an run the necessary
-    /// commands to sync them all with upstream.
-    fn update_submodules(&self) {
-        if !self.config.submodules {
-            return
-        }
-        if fs::metadata(self.src.join(".git")).is_err() {
-            return
-        }
-        let git_submodule = || {
-            let mut cmd = Command::new("git");
-            cmd.current_dir(&self.src).arg("submodule");
-            return cmd
-        };
-
-        // FIXME: this takes a seriously long time to execute on Windows and a
-        //        nontrivial amount of time on Unix, we should have a better way
-        //        of detecting whether we need to run all the submodule commands
-        //        below.
-        let out = output(git_submodule().arg("status"));
-        if !out.lines().any(|l| l.starts_with("+") || l.starts_with("-")) {
-            return
-        }
-
-        self.run(git_submodule().arg("sync"));
-        self.run(git_submodule().arg("init"));
-        self.run(git_submodule().arg("update"));
-        self.run(git_submodule().arg("update").arg("--recursive"));
-        self.run(git_submodule().arg("status").arg("--recursive"));
-        self.run(git_submodule().arg("foreach").arg("--recursive")
-                                .arg("git").arg("clean").arg("-fdx"));
-        self.run(git_submodule().arg("foreach").arg("--recursive")
-                                .arg("git").arg("checkout").arg("."));
-    }
-
-    /// Clear out `dir` if `input` is newer.
-    ///
-    /// After this executes, it will also ensure that `dir` exists.
-    fn clear_if_dirty(&self, dir: &Path, input: &Path) {
-        let stamp = dir.join(".stamp");
-        if mtime(&stamp) < mtime(input) {
-            self.verbose(&format!("Dirty - {}", dir.display()));
-            let _ = fs::remove_dir_all(dir);
-        }
-        t!(fs::create_dir_all(dir));
-        t!(File::create(stamp));
-    }
-
-    /// Prepares an invocation of `cargo` to be run.
-    ///
-    /// This will create a `Command` that represents a pending execution of
-    /// Cargo. This cargo will be configured to use `compiler` as the actual
-    /// rustc compiler, its output will be scoped by `mode`'s output directory,
-    /// it will pass the `--target` flag for the specified `target`, and will be
-    /// executing the Cargo command `cmd`.
-    fn cargo(&self,
-             compiler: &Compiler,
-             mode: Mode,
-             target: &str,
-             cmd: &str) -> Command {
-        let mut cargo = Command::new(&self.cargo);
-        let out_dir = self.stage_out(compiler, mode);
-        cargo.env("CARGO_TARGET_DIR", out_dir)
-             .arg(cmd)
-             .arg("-j").arg(self.jobs().to_string())
-             .arg("--target").arg(target);
-
-        let stage;
-        if compiler.stage == 0 && self.config.local_rebuild {
-            // Assume the local-rebuild rustc already has stage1 features.
-            stage = 1;
-        } else {
-            stage = compiler.stage;
-        }
-
-        // Customize the compiler we're running. Specify the compiler to cargo
-        // as our shim and then pass it some various options used to configure
-        // how the actual compiler itself is called.
-        //
-        // These variables are primarily all read by
-        // src/bootstrap/{rustc,rustdoc.rs}
-        cargo.env("RUSTC", self.out.join("bootstrap/debug/rustc"))
-             .env("RUSTC_REAL", self.compiler_path(compiler))
-             .env("RUSTC_STAGE", stage.to_string())
-             .env("RUSTC_DEBUGINFO", self.config.rust_debuginfo.to_string())
-             .env("RUSTC_CODEGEN_UNITS",
-                  self.config.rust_codegen_units.to_string())
-             .env("RUSTC_DEBUG_ASSERTIONS",
-                  self.config.rust_debug_assertions.to_string())
-             .env("RUSTC_SNAPSHOT", &self.rustc)
-             .env("RUSTC_SYSROOT", self.sysroot(compiler))
-             .env("RUSTC_LIBDIR", self.rustc_libdir(compiler))
-             .env("RUSTC_SNAPSHOT_LIBDIR", self.rustc_snapshot_libdir())
-             .env("RUSTC_RPATH", self.config.rust_rpath.to_string())
-             .env("RUSTDOC", self.out.join("bootstrap/debug/rustdoc"))
-             .env("RUSTDOC_REAL", self.rustdoc(compiler))
-             .env("RUSTC_FLAGS", self.rustc_flags(target).join(" "));
-
-        self.add_bootstrap_key(compiler, &mut cargo);
-
-        // Specify some various options for build scripts used throughout
-        // the build.
-        //
-        // FIXME: the guard against msvc shouldn't need to be here
-        if !target.contains("msvc") {
-            cargo.env(format!("CC_{}", target), self.cc(target))
-                 .env(format!("AR_{}", target), self.ar(target).unwrap()) // only msvc is None
-                 .env(format!("CFLAGS_{}", target), self.cflags(target).join(" "));
-        }
-
-        // If we're building for OSX, inform the compiler and the linker that
-        // we want to build a compiler runnable on 10.7
-        if target.contains("apple-darwin") {
-            cargo.env("MACOSX_DEPLOYMENT_TARGET", "10.7");
-        }
-
-        // Environment variables *required* needed throughout the build
-        //
-        // FIXME: should update code to not require this env var
-        cargo.env("CFG_COMPILER_HOST_TRIPLE", target);
-
-        if self.config.verbose || self.flags.verbose {
-            cargo.arg("-v");
-        }
-        if self.config.rust_optimize {
-            cargo.arg("--release");
-        }
-        return cargo
-    }
-
-    /// Get a path to the compiler specified.
-    fn compiler_path(&self, compiler: &Compiler) -> PathBuf {
-        if compiler.is_snapshot(self) {
-            self.rustc.clone()
-        } else {
-            self.sysroot(compiler).join("bin").join(exe("rustc", compiler.host))
-        }
-    }
-
-    /// Get the specified tool built by the specified compiler
-    fn tool(&self, compiler: &Compiler, tool: &str) -> PathBuf {
-        self.cargo_out(compiler, Mode::Tool, compiler.host)
-            .join(exe(tool, compiler.host))
-    }
-
-    /// Get the `rustdoc` executable next to the specified compiler
-    fn rustdoc(&self, compiler: &Compiler) -> PathBuf {
-        let mut rustdoc = self.compiler_path(compiler);
-        rustdoc.pop();
-        rustdoc.push(exe("rustdoc", compiler.host));
-        return rustdoc
-    }
-
-    /// Get a `Command` which is ready to run `tool` in `stage` built for
-    /// `host`.
-    fn tool_cmd(&self, compiler: &Compiler, tool: &str) -> Command {
-        let mut cmd = Command::new(self.tool(&compiler, tool));
-        let host = compiler.host;
-        let paths = vec![
-            self.cargo_out(compiler, Mode::Libstd, host).join("deps"),
-            self.cargo_out(compiler, Mode::Libtest, host).join("deps"),
-            self.cargo_out(compiler, Mode::Librustc, host).join("deps"),
-            self.cargo_out(compiler, Mode::Tool, host).join("deps"),
-        ];
-        add_lib_path(paths, &mut cmd);
-        return cmd
-    }
-
-    /// Get the space-separated set of activated features for the standard
-    /// library.
-    fn std_features(&self) -> String {
-        let mut features = String::new();
-        if self.config.debug_jemalloc {
-            features.push_str(" debug-jemalloc");
-        }
-        if self.config.use_jemalloc {
-            features.push_str(" jemalloc");
-        }
-        return features
-    }
-
-    /// Get the space-separated set of activated features for the compiler.
-    fn rustc_features(&self) -> String {
-        let mut features = String::new();
-        if self.config.use_jemalloc {
-            features.push_str(" jemalloc");
-        }
-        return features
-    }
-
-    /// Component directory that Cargo will produce output into (e.g.
-    /// release/debug)
-    fn cargo_dir(&self) -> &'static str {
-        if self.config.rust_optimize {"release"} else {"debug"}
-    }
-
-    /// Returns the sysroot for the `compiler` specified that *this build system
-    /// generates*.
-    ///
-    /// That is, the sysroot for the stage0 compiler is not what the compiler
-    /// thinks it is by default, but it's the same as the default for stages
-    /// 1-3.
-    fn sysroot(&self, compiler: &Compiler) -> PathBuf {
-        if compiler.stage == 0 {
-            self.out.join(compiler.host).join("stage0-sysroot")
-        } else {
-            self.out.join(compiler.host).join(format!("stage{}", compiler.stage))
-        }
-    }
-
-    /// Returns the libdir where the standard library and other artifacts are
-    /// found for a compiler's sysroot.
-    fn sysroot_libdir(&self, compiler: &Compiler, target: &str) -> PathBuf {
-        self.sysroot(compiler).join("lib").join("rustlib")
-            .join(target).join("lib")
-    }
-
-    /// Returns the root directory for all output generated in a particular
-    /// stage when running with a particular host compiler.
-    ///
-    /// The mode indicates what the root directory is for.
-    fn stage_out(&self, compiler: &Compiler, mode: Mode) -> PathBuf {
-        let suffix = match mode {
-            Mode::Libstd => "-std",
-            Mode::Libtest => "-test",
-            Mode::Tool => "-tools",
-            Mode::Librustc => "-rustc",
-        };
-        self.out.join(compiler.host)
-                .join(format!("stage{}{}", compiler.stage, suffix))
-    }
-
-    /// Returns the root output directory for all Cargo output in a given stage,
-    /// running a particular comipler, wehther or not we're building the
-    /// standard library, and targeting the specified architecture.
-    fn cargo_out(&self,
-                 compiler: &Compiler,
-                 mode: Mode,
-                 target: &str) -> PathBuf {
-        self.stage_out(compiler, mode).join(target).join(self.cargo_dir())
-    }
-
-    /// Root output directory for LLVM compiled for `target`
-    ///
-    /// Note that if LLVM is configured externally then the directory returned
-    /// will likely be empty.
-    fn llvm_out(&self, target: &str) -> PathBuf {
-        self.out.join(target).join("llvm")
-    }
-
-    /// Returns the path to `llvm-config` for the specified target.
-    ///
-    /// If a custom `llvm-config` was specified for target then that's returned
-    /// instead.
-    fn llvm_config(&self, target: &str) -> PathBuf {
-        let target_config = self.config.target_config.get(target);
-        if let Some(s) = target_config.and_then(|c| c.llvm_config.as_ref()) {
-            s.clone()
-        } else {
-            self.llvm_out(&self.config.build).join("bin")
-                .join(exe("llvm-config", target))
-        }
-    }
-
-    /// Returns the path to `FileCheck` binary for the specified target
-    fn llvm_filecheck(&self, target: &str) -> PathBuf {
-        let target_config = self.config.target_config.get(target);
-        if let Some(s) = target_config.and_then(|c| c.llvm_config.as_ref()) {
-            s.parent().unwrap().join(exe("FileCheck", target))
-        } else {
-            let base = self.llvm_out(&self.config.build).join("build");
-            let exe = exe("FileCheck", target);
-            if self.config.build.contains("msvc") {
-                base.join("Release/bin").join(exe)
-            } else {
-                base.join("bin").join(exe)
-            }
-        }
-    }
-
-    /// Root output directory for compiler-rt compiled for `target`
-    fn compiler_rt_out(&self, target: &str) -> PathBuf {
-        self.out.join(target).join("compiler-rt")
-    }
-
-    /// Root output directory for rust_test_helpers library compiled for
-    /// `target`
-    fn test_helpers_out(&self, target: &str) -> PathBuf {
-        self.out.join(target).join("rust-test-helpers")
-    }
-
-    /// Adds the compiler's directory of dynamic libraries to `cmd`'s dynamic
-    /// library lookup path.
-    fn add_rustc_lib_path(&self, compiler: &Compiler, cmd: &mut Command) {
-        // Windows doesn't need dylib path munging because the dlls for the
-        // compiler live next to the compiler and the system will find them
-        // automatically.
-        if cfg!(windows) {
-            return
-        }
-
-        add_lib_path(vec![self.rustc_libdir(compiler)], cmd);
-    }
-
-    /// Adds the compiler's bootstrap key to the environment of `cmd`.
-    fn add_bootstrap_key(&self, compiler: &Compiler, cmd: &mut Command) {
-        // In stage0 we're using a previously released stable compiler, so we
-        // use the stage0 bootstrap key. Otherwise we use our own build's
-        // bootstrap key.
-        let bootstrap_key = if compiler.is_snapshot(self) && !self.config.local_rebuild {
-            &self.bootstrap_key_stage0
-        } else {
-            &self.bootstrap_key
-        };
-        cmd.env("RUSTC_BOOTSTRAP_KEY", bootstrap_key);
-    }
-
-    /// Returns the compiler's libdir where it stores the dynamic libraries that
-    /// it itself links against.
-    ///
-    /// For example this returns `<sysroot>/lib` on Unix and `<sysroot>/bin` on
-    /// Windows.
-    fn rustc_libdir(&self, compiler: &Compiler) -> PathBuf {
-        if compiler.is_snapshot(self) {
-            self.rustc_snapshot_libdir()
-        } else {
-            self.sysroot(compiler).join(libdir(compiler.host))
-        }
-    }
-
-    /// Returns the libdir of the snapshot compiler.
-    fn rustc_snapshot_libdir(&self) -> PathBuf {
-        self.rustc.parent().unwrap().parent().unwrap()
-            .join(libdir(&self.config.build))
-    }
-
-    /// Runs a command, printing out nice contextual information if it fails.
-    fn run(&self, cmd: &mut Command) {
-        self.verbose(&format!("running: {:?}", cmd));
-        run_silent(cmd)
-    }
-
-    /// Prints a message if this build is configured in verbose mode.
-    fn verbose(&self, msg: &str) {
-        if self.flags.verbose || self.config.verbose {
-            println!("{}", msg);
-        }
-    }
-
-    /// Returns the number of parallel jobs that have been configured for this
-    /// build.
-    fn jobs(&self) -> u32 {
-        self.flags.jobs.unwrap_or(num_cpus::get() as u32)
-    }
-
-    /// Returns the path to the C compiler for the target specified.
-    fn cc(&self, target: &str) -> &Path {
-        self.cc[target].0.path()
-    }
-
-    /// Returns a list of flags to pass to the C compiler for the target
-    /// specified.
-    fn cflags(&self, target: &str) -> Vec<String> {
-        // Filter out -O and /O (the optimization flags) that we picked up from
-        // gcc-rs because the build scripts will determine that for themselves.
-        let mut base = self.cc[target].0.args().iter()
-                           .map(|s| s.to_string_lossy().into_owned())
-                           .filter(|s| !s.starts_with("-O") && !s.starts_with("/O"))
-                           .collect::<Vec<_>>();
-
-        // If we're compiling on OSX then we add a few unconditional flags
-        // indicating that we want libc++ (more filled out than libstdc++) and
-        // we want to compile for 10.7. This way we can ensure that
-        // LLVM/jemalloc/etc are all properly compiled.
-        if target.contains("apple-darwin") {
-            base.push("-stdlib=libc++".into());
-            base.push("-mmacosx-version-min=10.7".into());
-        }
-        return base
-    }
-
-    /// Returns the path to the `ar` archive utility for the target specified.
-    fn ar(&self, target: &str) -> Option<&Path> {
-        self.cc[target].1.as_ref().map(|p| &**p)
-    }
-
-    /// Returns the path to the C++ compiler for the target specified, may panic
-    /// if no C++ compiler was configured for the target.
-    fn cxx(&self, target: &str) -> &Path {
-        self.cxx[target].path()
-    }
-
-    /// Returns flags to pass to the compiler to generate code for `target`.
-    fn rustc_flags(&self, target: &str) -> Vec<String> {
-        // New flags should be added here with great caution!
-        //
-        // It's quite unfortunate to **require** flags to generate code for a
-        // target, so it should only be passed here if absolutely necessary!
-        // Most default configuration should be done through target specs rather
-        // than an entry here.
-
-        let mut base = Vec::new();
-        if target != self.config.build && !target.contains("msvc") {
-            base.push(format!("-Clinker={}", self.cc(target).display()));
-        }
-        return base
-    }
-}
-
-impl<'a> Compiler<'a> {
-    /// Creates a new complier for the specified stage/host
-    fn new(stage: u32, host: &'a str) -> Compiler<'a> {
-        Compiler { stage: stage, host: host }
-    }
-
-    /// Returns whether this is a snapshot compiler for `build`'s configuration
-    fn is_snapshot(&self, build: &Build) -> bool {
-        self.stage == 0 && self.host == build.config.build
-    }
-}
diff --git a/src/bootstrap/build/native.rs b/src/bootstrap/build/native.rs
deleted file mode 100644 (file)
index f6030cf..0000000
+++ /dev/null
@@ -1,238 +0,0 @@
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Compilation of native dependencies like LLVM.
-//!
-//! Native projects like LLVM unfortunately aren't suited just yet for
-//! compilation in build scripts that Cargo has. This is because thie
-//! compilation takes a *very* long time but also because we don't want to
-//! compile LLVM 3 times as part of a normal bootstrap (we want it cached).
-//!
-//! LLVM and compiler-rt are essentially just wired up to everything else to
-//! ensure that they're always in place if needed.
-
-use std::path::Path;
-use std::process::Command;
-use std::fs::{self, File};
-
-use build_helper::output;
-use cmake;
-use gcc;
-
-use build::Build;
-use build::util::{staticlib, up_to_date};
-
-/// Compile LLVM for `target`.
-pub fn llvm(build: &Build, target: &str) {
-    // If we're using a custom LLVM bail out here, but we can only use a
-    // custom LLVM for the build triple.
-    if let Some(config) = build.config.target_config.get(target) {
-        if let Some(ref s) = config.llvm_config {
-            return check_llvm_version(build, s);
-        }
-    }
-
-    // If the cleaning trigger is newer than our built artifacts (or if the
-    // artifacts are missing) then we keep going, otherwise we bail out.
-    let dst = build.llvm_out(target);
-    let stamp = build.src.join("src/rustllvm/llvm-auto-clean-trigger");
-    let done_stamp = dst.join("llvm-finished-building");
-    build.clear_if_dirty(&dst, &stamp);
-    if fs::metadata(&done_stamp).is_ok() {
-        return
-    }
-
-    println!("Building LLVM for {}", target);
-
-    let _ = fs::remove_dir_all(&dst.join("build"));
-    t!(fs::create_dir_all(&dst.join("build")));
-    let assertions = if build.config.llvm_assertions {"ON"} else {"OFF"};
-
-    // http://llvm.org/docs/CMake.html
-    let mut cfg = cmake::Config::new(build.src.join("src/llvm"));
-    if build.config.ninja {
-        cfg.generator("Ninja");
-    }
-    cfg.target(target)
-       .host(&build.config.build)
-       .out_dir(&dst)
-       .profile(if build.config.llvm_optimize {"Release"} else {"Debug"})
-       .define("LLVM_ENABLE_ASSERTIONS", assertions)
-       .define("LLVM_TARGETS_TO_BUILD", "X86;ARM;AArch64;Mips;PowerPC")
-       .define("LLVM_INCLUDE_EXAMPLES", "OFF")
-       .define("LLVM_INCLUDE_TESTS", "OFF")
-       .define("LLVM_INCLUDE_DOCS", "OFF")
-       .define("LLVM_ENABLE_ZLIB", "OFF")
-       .define("WITH_POLLY", "OFF")
-       .define("LLVM_ENABLE_TERMINFO", "OFF")
-       .define("LLVM_ENABLE_LIBEDIT", "OFF")
-       .define("LLVM_PARALLEL_COMPILE_JOBS", build.jobs().to_string());
-
-    if target.starts_with("i686") {
-        cfg.define("LLVM_BUILD_32_BITS", "ON");
-    }
-
-    // http://llvm.org/docs/HowToCrossCompileLLVM.html
-    if target != build.config.build {
-        // FIXME: if the llvm root for the build triple is overridden then we
-        //        should use llvm-tblgen from there, also should verify that it
-        //        actually exists most of the time in normal installs of LLVM.
-        let host = build.llvm_out(&build.config.build).join("bin/llvm-tblgen");
-        cfg.define("CMAKE_CROSSCOMPILING", "True")
-           .define("LLVM_TARGET_ARCH", target.split('-').next().unwrap())
-           .define("LLVM_TABLEGEN", &host)
-           .define("LLVM_DEFAULT_TARGET_TRIPLE", target);
-    }
-
-    // MSVC handles compiler business itself
-    if !target.contains("msvc") {
-        if build.config.ccache {
-           cfg.define("CMAKE_C_COMPILER", "ccache")
-              .define("CMAKE_C_COMPILER_ARG1", build.cc(target))
-              .define("CMAKE_CXX_COMPILER", "ccache")
-              .define("CMAKE_CXX_COMPILER_ARG1", build.cxx(target));
-        } else {
-           cfg.define("CMAKE_C_COMPILER", build.cc(target))
-              .define("CMAKE_CXX_COMPILER", build.cxx(target));
-        }
-        cfg.build_arg("-j").build_arg(build.jobs().to_string());
-
-        cfg.define("CMAKE_C_FLAGS", build.cflags(target).join(" "));
-        cfg.define("CMAKE_CXX_FLAGS", build.cflags(target).join(" "));
-    }
-
-    // FIXME: we don't actually need to build all LLVM tools and all LLVM
-    //        libraries here, e.g. we just want a few components and a few
-    //        tools. Figure out how to filter them down and only build the right
-    //        tools and libs on all platforms.
-    cfg.build();
-
-    t!(File::create(&done_stamp));
-}
-
-fn check_llvm_version(build: &Build, llvm_config: &Path) {
-    if !build.config.llvm_version_check {
-        return
-    }
-
-    let mut cmd = Command::new(llvm_config);
-    let version = output(cmd.arg("--version"));
-    if version.starts_with("3.5") || version.starts_with("3.6") ||
-       version.starts_with("3.7") {
-        return
-    }
-    panic!("\n\nbad LLVM version: {}, need >=3.5\n\n", version)
-}
-
-/// Compiles the `compiler-rt` library, or at least the builtins part of it.
-///
-/// This uses the CMake build system and an existing LLVM build directory to
-/// compile the project.
-pub fn compiler_rt(build: &Build, target: &str) {
-    let dst = build.compiler_rt_out(target);
-    let arch = target.split('-').next().unwrap();
-    let mode = if build.config.rust_optimize {"Release"} else {"Debug"};
-
-    let build_llvm_config = build.llvm_config(&build.config.build);
-    let mut cfg = cmake::Config::new(build.src.join("src/compiler-rt"));
-    cfg.target(target)
-       .host(&build.config.build)
-       .out_dir(&dst)
-       .profile(mode)
-       .define("LLVM_CONFIG_PATH", build_llvm_config)
-       .define("COMPILER_RT_DEFAULT_TARGET_TRIPLE", target)
-       .define("COMPILER_RT_BUILD_SANITIZERS", "OFF")
-       .define("COMPILER_RT_BUILD_EMUTLS", "OFF")
-       // inform about c/c++ compilers, the c++ compiler isn't actually used but
-       // it's needed to get the initial configure to work on all platforms.
-       .define("CMAKE_C_COMPILER", build.cc(target))
-       .define("CMAKE_CXX_COMPILER", build.cc(target));
-
-    let (dir, build_target, libname) = if target.contains("linux") ||
-                                          target.contains("freebsd") ||
-                                          target.contains("netbsd") {
-        let os_extra = if target.contains("android") && target.contains("arm") {
-            "-android"
-        } else {
-            ""
-        };
-        let builtins_arch = match arch {
-            "i586" => "i386",
-            "arm" | "armv7" if target.contains("android") => "armhf",
-            "arm" if target.contains("eabihf") => "armhf",
-            _ => arch,
-        };
-        let target = format!("clang_rt.builtins-{}", builtins_arch);
-        ("linux".to_string(),
-         target.clone(),
-         format!("{}{}", target, os_extra))
-    } else if target.contains("apple-darwin") {
-        let builtins_arch = match arch {
-            "i686" => "i386",
-            _ => arch,
-        };
-        let target = format!("clang_rt.builtins_{}_osx", builtins_arch);
-        ("builtins".to_string(), target.clone(), target)
-    } else if target.contains("apple-ios") {
-        cfg.define("COMPILER_RT_ENABLE_IOS", "ON");
-        let target = match arch {
-            "armv7s" => "hard_pic_armv7em_macho_embedded".to_string(),
-            "aarch64" => "builtins_arm64_ios".to_string(),
-            _ => format!("hard_pic_{}_macho_embedded", arch),
-        };
-        ("builtins".to_string(), target.clone(), target)
-    } else if target.contains("windows-gnu") {
-        let target = format!("clang_rt.builtins-{}", arch);
-        ("windows".to_string(), target.clone(), target)
-    } else if target.contains("windows-msvc") {
-        let builtins_arch = match arch {
-            "i586" | "i686" => "i386",
-            _ => arch,
-        };
-        (format!("windows/{}", mode),
-         "lib/builtins/builtins".to_string(),
-         format!("clang_rt.builtins-{}", builtins_arch))
-    } else {
-        panic!("can't get os from target: {}", target)
-    };
-    let output = dst.join("build/lib").join(dir)
-                    .join(staticlib(&libname, target));
-    build.compiler_rt_built.borrow_mut().insert(target.to_string(),
-                                                output.clone());
-    if fs::metadata(&output).is_ok() {
-        return
-    }
-    let _ = fs::remove_dir_all(&dst);
-    t!(fs::create_dir_all(&dst));
-    cfg.build_target(&build_target);
-    cfg.build();
-}
-
-/// Compiles the `rust_test_helpers.c` library which we used in various
-/// `run-pass` test suites for ABI testing.
-pub fn test_helpers(build: &Build, target: &str) {
-    let dst = build.test_helpers_out(target);
-    let src = build.src.join("src/rt/rust_test_helpers.c");
-    if up_to_date(&src, &dst.join("librust_test_helpers.a")) {
-        return
-    }
-
-    println!("Building test helpers");
-    t!(fs::create_dir_all(&dst));
-    let mut cfg = gcc::Config::new();
-    cfg.cargo_metadata(false)
-       .out_dir(&dst)
-       .target(target)
-       .host(&build.config.build)
-       .opt_level(0)
-       .debug(false)
-       .file(build.src.join("src/rt/rust_test_helpers.c"))
-       .compile("librust_test_helpers.a");
-}
diff --git a/src/bootstrap/build/sanity.rs b/src/bootstrap/build/sanity.rs
deleted file mode 100644 (file)
index 5eced00..0000000
+++ /dev/null
@@ -1,172 +0,0 @@
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Sanity checking performed by rustbuild before actually executing anything.
-//!
-//! This module contains the implementation of ensuring that the build
-//! environment looks reasonable before progressing. This will verify that
-//! various programs like git and python exist, along with ensuring that all C
-//! compilers for cross-compiling are found.
-//!
-//! In theory if we get past this phase it's a bug if a build fails, but in
-//! practice that's likely not true!
-
-use std::collections::HashSet;
-use std::env;
-use std::ffi::{OsStr, OsString};
-use std::fs;
-use std::process::Command;
-
-use build_helper::output;
-
-use build::Build;
-
-pub fn check(build: &mut Build) {
-    let mut checked = HashSet::new();
-    let path = env::var_os("PATH").unwrap_or(OsString::new());
-    let mut need_cmd = |cmd: &OsStr| {
-        if !checked.insert(cmd.to_owned()) {
-            return
-        }
-        for path in env::split_paths(&path).map(|p| p.join(cmd)) {
-            if fs::metadata(&path).is_ok() ||
-               fs::metadata(path.with_extension("exe")).is_ok() {
-                return
-            }
-        }
-        panic!("\n\ncouldn't find required command: {:?}\n\n", cmd);
-    };
-
-    // If we've got a git directory we're gonna need git to update
-    // submodules and learn about various other aspects.
-    if fs::metadata(build.src.join(".git")).is_ok() {
-        need_cmd("git".as_ref());
-    }
-
-    // We need cmake, but only if we're actually building LLVM
-    for host in build.config.host.iter() {
-        if let Some(config) = build.config.target_config.get(host) {
-            if config.llvm_config.is_some() {
-                continue
-            }
-        }
-        need_cmd("cmake".as_ref());
-        if build.config.ninja {
-            need_cmd("ninja".as_ref())
-        }
-        break
-    }
-
-    need_cmd("python".as_ref());
-
-    // We're gonna build some custom C code here and there, host triples
-    // also build some C++ shims for LLVM so we need a C++ compiler.
-    for target in build.config.target.iter() {
-        need_cmd(build.cc(target).as_ref());
-        if let Some(ar) = build.ar(target) {
-            need_cmd(ar.as_ref());
-        }
-    }
-    for host in build.config.host.iter() {
-        need_cmd(build.cxx(host).as_ref());
-    }
-
-    // Externally configured LLVM requires FileCheck to exist
-    let filecheck = build.llvm_filecheck(&build.config.build);
-    if !filecheck.starts_with(&build.out) && !filecheck.exists() {
-        panic!("filecheck executable {:?} does not exist", filecheck);
-    }
-
-    for target in build.config.target.iter() {
-        // Either can't build or don't want to run jemalloc on these targets
-        if target.contains("rumprun") ||
-           target.contains("bitrig") ||
-           target.contains("openbsd") ||
-           target.contains("msvc") {
-            build.config.use_jemalloc = false;
-        }
-
-        // Can't compile for iOS unless we're on OSX
-        if target.contains("apple-ios") &&
-           !build.config.build.contains("apple-darwin") {
-            panic!("the iOS target is only supported on OSX");
-        }
-
-        // Make sure musl-root is valid if specified
-        if target.contains("musl") && (target.contains("x86_64") || target.contains("i686")) {
-            match build.config.musl_root {
-                Some(ref root) => {
-                    if fs::metadata(root.join("lib/libc.a")).is_err() {
-                        panic!("couldn't find libc.a in musl dir: {}",
-                               root.join("lib").display());
-                    }
-                    if fs::metadata(root.join("lib/libunwind.a")).is_err() {
-                        panic!("couldn't find libunwind.a in musl dir: {}",
-                               root.join("lib").display());
-                    }
-                }
-                None => {
-                    panic!("when targeting MUSL the build.musl-root option \
-                            must be specified in config.toml")
-                }
-            }
-        }
-
-        if target.contains("msvc") {
-            // There are three builds of cmake on windows: MSVC, MinGW, and
-            // Cygwin. The Cygwin build does not have generators for Visual
-            // Studio, so detect that here and error.
-            let out = output(Command::new("cmake").arg("--help"));
-            if !out.contains("Visual Studio") {
-                panic!("
-cmake does not support Visual Studio generators.
-
-This is likely due to it being an msys/cygwin build of cmake,
-rather than the required windows version, built using MinGW
-or Visual Studio.
-
-If you are building under msys2 try installing the mingw-w64-x86_64-cmake
-package instead of cmake:
-
-$ pacman -R cmake && pacman -S mingw-w64-x86_64-cmake
-");
-            }
-        }
-
-        if target.contains("arm-linux-android") {
-            need_cmd("adb".as_ref());
-        }
-    }
-
-    for host in build.flags.host.iter() {
-        if !build.config.host.contains(host) {
-            panic!("specified host `{}` is not in the ./configure list", host);
-        }
-    }
-    for target in build.flags.target.iter() {
-        if !build.config.target.contains(target) {
-            panic!("specified target `{}` is not in the ./configure list",
-                   target);
-        }
-    }
-
-    let run = |cmd: &mut Command| {
-        cmd.output().map(|output| {
-            String::from_utf8_lossy(&output.stdout)
-                   .lines().next().unwrap()
-                   .to_string()
-        })
-    };
-    build.gdb_version = run(Command::new("gdb").arg("--version")).ok();
-    build.lldb_version = run(Command::new("lldb").arg("--version")).ok();
-    if build.lldb_version.is_some() {
-        build.lldb_python_dir = run(Command::new("lldb").arg("-P")).ok();
-    }
-}
diff --git a/src/bootstrap/build/step.rs b/src/bootstrap/build/step.rs
deleted file mode 100644 (file)
index 7cbbd67..0000000
+++ /dev/null
@@ -1,590 +0,0 @@
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Major workhorse of rustbuild, definition and dependencies between stages of
-//! the compile.
-//!
-//! The primary purpose of this module is to define the various `Step`s of
-//! execution of the build. Each `Step` has a corresponding `Source` indicating
-//! what it's actually doing along with a number of dependencies which must be
-//! executed first.
-//!
-//! This module will take the CLI as input and calculate the steps required for
-//! the build requested, ensuring that all intermediate pieces are in place.
-//! Essentially this module is a `make`-replacement, but not as good.
-
-use std::collections::HashSet;
-
-use build::{Build, Compiler};
-
-#[derive(Hash, Eq, PartialEq, Clone, Debug)]
-pub struct Step<'a> {
-    pub src: Source<'a>,
-    pub target: &'a str,
-}
-
-/// Macro used to iterate over all targets that are recognized by the build
-/// system.
-///
-/// Whenever a new step is added it will involve adding an entry here, updating
-/// the dependencies section below, and then adding an implementation of the
-/// step in `build/mod.rs`.
-///
-/// This macro takes another macro as an argument and then calls that macro with
-/// all steps that the build system knows about.
-macro_rules! targets {
-    ($m:ident) => {
-        $m! {
-            // Step representing building the stageN compiler. This is just the
-            // compiler executable itself, not any of the support libraries
-            (rustc, Rustc { stage: u32 }),
-
-            // Steps for the two main cargo builds. These are parameterized over
-            // the compiler which is producing the artifact.
-            (libstd, Libstd { compiler: Compiler<'a> }),
-            (libtest, Libtest { compiler: Compiler<'a> }),
-            (librustc, Librustc { compiler: Compiler<'a> }),
-
-            // Links the target produced by the compiler provided into the
-            // host's directory also provided.
-            (libstd_link, LibstdLink {
-                compiler: Compiler<'a>,
-                host: &'a str
-            }),
-            (libtest_link, LibtestLink {
-                compiler: Compiler<'a>,
-                host: &'a str
-            }),
-            (librustc_link, LibrustcLink {
-                compiler: Compiler<'a>,
-                host: &'a str
-            }),
-
-            // Various tools that we can build as part of the build.
-            (tool_linkchecker, ToolLinkchecker { stage: u32 }),
-            (tool_rustbook, ToolRustbook { stage: u32 }),
-            (tool_error_index, ToolErrorIndex { stage: u32 }),
-            (tool_cargotest, ToolCargoTest { stage: u32 }),
-            (tool_tidy, ToolTidy { stage: u32 }),
-            (tool_compiletest, ToolCompiletest { stage: u32 }),
-
-            // Steps for long-running native builds. Ideally these wouldn't
-            // actually exist and would be part of build scripts, but for now
-            // these are here.
-            //
-            // There aren't really any parameters to this, but empty structs
-            // with braces are unstable so we just pick something that works.
-            (llvm, Llvm { _dummy: () }),
-            (compiler_rt, CompilerRt { _dummy: () }),
-            (test_helpers, TestHelpers { _dummy: () }),
-            (debugger_scripts, DebuggerScripts { stage: u32 }),
-
-            // Steps for various pieces of documentation that we can generate,
-            // the 'doc' step is just a pseudo target to depend on a bunch of
-            // others.
-            (doc, Doc { stage: u32 }),
-            (doc_book, DocBook { stage: u32 }),
-            (doc_nomicon, DocNomicon { stage: u32 }),
-            (doc_style, DocStyle { stage: u32 }),
-            (doc_standalone, DocStandalone { stage: u32 }),
-            (doc_std, DocStd { stage: u32 }),
-            (doc_test, DocTest { stage: u32 }),
-            (doc_rustc, DocRustc { stage: u32 }),
-            (doc_error_index, DocErrorIndex { stage: u32 }),
-
-            // Steps for running tests. The 'check' target is just a pseudo
-            // target to depend on a bunch of others.
-            (check, Check { stage: u32, compiler: Compiler<'a> }),
-            (check_target, CheckTarget { stage: u32, compiler: Compiler<'a> }),
-            (check_linkcheck, CheckLinkcheck { stage: u32 }),
-            (check_cargotest, CheckCargoTest { stage: u32 }),
-            (check_tidy, CheckTidy { stage: u32 }),
-            (check_rpass, CheckRPass { compiler: Compiler<'a> }),
-            (check_rpass_full, CheckRPassFull { compiler: Compiler<'a> }),
-            (check_rpass_valgrind, CheckRPassValgrind { compiler: Compiler<'a> }),
-            (check_rfail, CheckRFail { compiler: Compiler<'a> }),
-            (check_rfail_full, CheckRFailFull { compiler: Compiler<'a> }),
-            (check_cfail, CheckCFail { compiler: Compiler<'a> }),
-            (check_cfail_full, CheckCFailFull { compiler: Compiler<'a> }),
-            (check_pfail, CheckPFail { compiler: Compiler<'a> }),
-            (check_pretty, CheckPretty { compiler: Compiler<'a> }),
-            (check_pretty_rpass, CheckPrettyRPass { compiler: Compiler<'a> }),
-            (check_pretty_rpass_full, CheckPrettyRPassFull { compiler: Compiler<'a> }),
-            (check_pretty_rfail, CheckPrettyRFail { compiler: Compiler<'a> }),
-            (check_pretty_rfail_full, CheckPrettyRFailFull { compiler: Compiler<'a> }),
-            (check_pretty_rpass_valgrind, CheckPrettyRPassValgrind { compiler: Compiler<'a> }),
-            (check_codegen, CheckCodegen { compiler: Compiler<'a> }),
-            (check_codegen_units, CheckCodegenUnits { compiler: Compiler<'a> }),
-            (check_incremental, CheckIncremental { compiler: Compiler<'a> }),
-            (check_ui, CheckUi { compiler: Compiler<'a> }),
-            (check_debuginfo, CheckDebuginfo { compiler: Compiler<'a> }),
-            (check_rustdoc, CheckRustdoc { compiler: Compiler<'a> }),
-            (check_docs, CheckDocs { compiler: Compiler<'a> }),
-            (check_error_index, CheckErrorIndex { compiler: Compiler<'a> }),
-            (check_rmake, CheckRMake { compiler: Compiler<'a> }),
-            (check_crate_std, CheckCrateStd { compiler: Compiler<'a> }),
-            (check_crate_test, CheckCrateTest { compiler: Compiler<'a> }),
-            (check_crate_rustc, CheckCrateRustc { compiler: Compiler<'a> }),
-
-            // Distribution targets, creating tarballs
-            (dist, Dist { stage: u32 }),
-            (dist_docs, DistDocs { stage: u32 }),
-            (dist_mingw, DistMingw { _dummy: () }),
-            (dist_rustc, DistRustc { stage: u32 }),
-            (dist_std, DistStd { compiler: Compiler<'a> }),
-
-            // Misc targets
-            (android_copy_libs, AndroidCopyLibs { compiler: Compiler<'a> }),
-        }
-    }
-}
-
-// Define the `Source` enum by iterating over all the steps and peeling out just
-// the types that we want to define.
-
-macro_rules! item { ($a:item) => ($a) }
-
-macro_rules! define_source {
-    ($(($short:ident, $name:ident { $($args:tt)* }),)*) => {
-        item! {
-            #[derive(Hash, Eq, PartialEq, Clone, Debug)]
-            pub enum Source<'a> {
-                $($name { $($args)* }),*
-            }
-        }
-    }
-}
-
-targets!(define_source);
-
-/// Calculate a list of all steps described by `build`.
-///
-/// This will inspect the flags passed in on the command line and use that to
-/// build up a list of steps to execute. These steps will then be transformed
-/// into a topologically sorted list which when executed left-to-right will
-/// correctly sequence the entire build.
-pub fn all(build: &Build) -> Vec<Step> {
-    let mut ret = Vec::new();
-    let mut all = HashSet::new();
-    for target in top_level(build) {
-        fill(build, &target, &mut ret, &mut all);
-    }
-    return ret;
-
-    fn fill<'a>(build: &'a Build,
-                target: &Step<'a>,
-                ret: &mut Vec<Step<'a>>,
-                set: &mut HashSet<Step<'a>>) {
-        if set.insert(target.clone()) {
-            for dep in target.deps(build) {
-                fill(build, &dep, ret, set);
-            }
-            ret.push(target.clone());
-        }
-    }
-}
-
-/// Determines what top-level targets are requested as part of this build,
-/// returning them as a list.
-fn top_level(build: &Build) -> Vec<Step> {
-    let mut targets = Vec::new();
-    let stage = build.flags.stage.unwrap_or(2);
-
-    let host = Step {
-        src: Source::Llvm { _dummy: () },
-        target: build.flags.host.iter().next()
-                     .unwrap_or(&build.config.build),
-    };
-    let target = Step {
-        src: Source::Llvm { _dummy: () },
-        target: build.flags.target.iter().next().map(|x| &x[..])
-                     .unwrap_or(host.target)
-    };
-
-    // First, try to find steps on the command line.
-    add_steps(build, stage, &host, &target, &mut targets);
-
-    // If none are specified, then build everything.
-    if targets.len() == 0 {
-        let t = Step {
-            src: Source::Llvm { _dummy: () },
-            target: &build.config.build,
-        };
-        if build.config.docs {
-          targets.push(t.doc(stage));
-        }
-        for host in build.config.host.iter() {
-            if !build.flags.host.contains(host) {
-                continue
-            }
-            let host = t.target(host);
-            if host.target == build.config.build {
-                targets.push(host.librustc(host.compiler(stage)));
-            } else {
-                targets.push(host.librustc_link(t.compiler(stage), host.target));
-            }
-            for target in build.config.target.iter() {
-                if !build.flags.target.contains(target) {
-                    continue
-                }
-
-                if host.target == build.config.build {
-                    targets.push(host.target(target)
-                                     .libtest(host.compiler(stage)));
-                } else {
-                    targets.push(host.target(target)
-                                     .libtest_link(t.compiler(stage), host.target));
-                }
-            }
-        }
-    }
-
-    return targets
-
-}
-
-fn add_steps<'a>(build: &'a Build,
-                 stage: u32,
-                 host: &Step<'a>,
-                 target: &Step<'a>,
-                 targets: &mut Vec<Step<'a>>) {
-    struct Context<'a> {
-        stage: u32,
-        compiler: Compiler<'a>,
-        _dummy: (),
-        host: &'a str,
-    }
-    for step in build.flags.step.iter() {
-
-        // The macro below insists on hygienic access to all local variables, so
-        // we shove them all in a struct and subvert hygiene by accessing struct
-        // fields instead.
-        let cx = Context {
-            stage: stage,
-            compiler: host.target(&build.config.build).compiler(stage),
-            _dummy: (),
-            host: host.target,
-        };
-        macro_rules! add_step {
-            ($(($short:ident, $name:ident { $($arg:ident: $t:ty),* }),)*) => ({$(
-                let name = stringify!($short).replace("_", "-");
-                if &step[..] == &name[..] {
-                    targets.push(target.$short($(cx.$arg),*));
-                    continue
-                }
-                drop(name);
-            )*})
-        }
-
-        targets!(add_step);
-
-        panic!("unknown step: {}", step);
-    }
-}
-
-macro_rules! constructors {
-    ($(($short:ident, $name:ident { $($arg:ident: $t:ty),* }),)*) => {$(
-        fn $short(&self, $($arg: $t),*) -> Step<'a> {
-            Step {
-                src: Source::$name { $($arg: $arg),* },
-                target: self.target,
-            }
-        }
-    )*}
-}
-
-impl<'a> Step<'a> {
-    fn compiler(&self, stage: u32) -> Compiler<'a> {
-        Compiler::new(stage, self.target)
-    }
-
-    fn target(&self, target: &'a str) -> Step<'a> {
-        Step { target: target, src: self.src.clone() }
-    }
-
-    // Define ergonomic constructors for each step defined above so they can be
-    // easily constructed.
-    targets!(constructors);
-
-    /// Mapping of all dependencies for rustbuild.
-    ///
-    /// This function receives a step, the build that we're building for, and
-    /// then returns a list of all the dependencies of that step.
-    pub fn deps(&self, build: &'a Build) -> Vec<Step<'a>> {
-        match self.src {
-            Source::Rustc { stage: 0 } => {
-                Vec::new()
-            }
-            Source::Rustc { stage } => {
-                let compiler = Compiler::new(stage - 1, &build.config.build);
-                vec![self.librustc(compiler)]
-            }
-            Source::Librustc { compiler } => {
-                vec![self.libtest(compiler), self.llvm(())]
-            }
-            Source::Libtest { compiler } => {
-                vec![self.libstd(compiler)]
-            }
-            Source::Libstd { compiler } => {
-                vec![self.compiler_rt(()),
-                     self.rustc(compiler.stage).target(compiler.host)]
-            }
-            Source::LibrustcLink { compiler, host } => {
-                vec![self.librustc(compiler),
-                     self.libtest_link(compiler, host)]
-            }
-            Source::LibtestLink { compiler, host } => {
-                vec![self.libtest(compiler), self.libstd_link(compiler, host)]
-            }
-            Source::LibstdLink { compiler, host } => {
-                vec![self.libstd(compiler),
-                     self.target(host).rustc(compiler.stage)]
-            }
-            Source::CompilerRt { _dummy } => {
-                vec![self.llvm(()).target(&build.config.build)]
-            }
-            Source::Llvm { _dummy } => Vec::new(),
-            Source::TestHelpers { _dummy } => Vec::new(),
-            Source::DebuggerScripts { stage: _ } => Vec::new(),
-
-            // Note that all doc targets depend on artifacts from the build
-            // architecture, not the target (which is where we're generating
-            // docs into).
-            Source::DocStd { stage } => {
-                let compiler = self.target(&build.config.build).compiler(stage);
-                vec![self.libstd(compiler)]
-            }
-            Source::DocTest { stage } => {
-                let compiler = self.target(&build.config.build).compiler(stage);
-                vec![self.libtest(compiler)]
-            }
-            Source::DocBook { stage } |
-            Source::DocNomicon { stage } |
-            Source::DocStyle { stage } => {
-                vec![self.target(&build.config.build).tool_rustbook(stage)]
-            }
-            Source::DocErrorIndex { stage } => {
-                vec![self.target(&build.config.build).tool_error_index(stage)]
-            }
-            Source::DocStandalone { stage } => {
-                vec![self.target(&build.config.build).rustc(stage)]
-            }
-            Source::DocRustc { stage } => {
-                vec![self.doc_test(stage)]
-            }
-            Source::Doc { stage } => {
-                vec![self.doc_book(stage), self.doc_nomicon(stage),
-                     self.doc_style(stage), self.doc_standalone(stage),
-                     self.doc_std(stage),
-                     self.doc_error_index(stage)]
-            }
-            Source::Check { stage, compiler } => {
-                // Check is just a pseudo step which means check all targets,
-                // so just depend on checking all targets.
-                build.config.target.iter().map(|t| {
-                    self.target(t).check_target(stage, compiler)
-                }).collect()
-            }
-            Source::CheckTarget { stage, compiler } => {
-                // CheckTarget here means run all possible test suites for this
-                // target. Most of the time, however, we can't actually run
-                // anything if we're not the build triple as we could be cross
-                // compiling.
-                //
-                // As a result, the base set of targets here is quite stripped
-                // down from the standard set of targets. These suites have
-                // their own internal logic to run in cross-compiled situations
-                // if they'll run at all. For example compiletest knows that
-                // when testing Android targets we ship artifacts to the
-                // emulator.
-                //
-                // When in doubt the rule of thumb for adding to this list is
-                // "should this test suite run on the android bot?"
-                let mut base = vec![
-                    self.check_rpass(compiler),
-                    self.check_rfail(compiler),
-                    self.check_crate_std(compiler),
-                    self.check_crate_test(compiler),
-                    self.check_debuginfo(compiler),
-                    self.dist(stage),
-                ];
-
-                // If we're testing the build triple, then we know we can
-                // actually run binaries and such, so we run all possible tests
-                // that we know about.
-                if self.target == build.config.build {
-                    base.extend(vec![
-                        // docs-related
-                        self.check_docs(compiler),
-                        self.check_error_index(compiler),
-                        self.check_rustdoc(compiler),
-
-                        // UI-related
-                        self.check_cfail(compiler),
-                        self.check_pfail(compiler),
-                        self.check_ui(compiler),
-
-                        // codegen-related
-                        self.check_incremental(compiler),
-                        self.check_codegen(compiler),
-                        self.check_codegen_units(compiler),
-
-                        // misc compiletest-test suites
-                        self.check_rpass_full(compiler),
-                        self.check_rfail_full(compiler),
-                        self.check_cfail_full(compiler),
-                        self.check_pretty_rpass_full(compiler),
-                        self.check_pretty_rfail_full(compiler),
-                        self.check_rpass_valgrind(compiler),
-                        self.check_rmake(compiler),
-
-                        // crates
-                        self.check_crate_rustc(compiler),
-
-                        // pretty
-                        self.check_pretty(compiler),
-                        self.check_pretty_rpass(compiler),
-                        self.check_pretty_rfail(compiler),
-                        self.check_pretty_rpass_valgrind(compiler),
-
-                        // misc
-                        self.check_linkcheck(stage),
-                        self.check_tidy(stage),
-                    ]);
-                }
-                return base
-            }
-            Source::CheckLinkcheck { stage } => {
-                vec![self.tool_linkchecker(stage), self.doc(stage)]
-            }
-            Source::CheckCargoTest { stage } => {
-                vec![self.tool_cargotest(stage),
-                     self.librustc(self.compiler(stage))]
-            }
-            Source::CheckTidy { stage } => {
-                vec![self.tool_tidy(stage)]
-            }
-            Source::CheckPrettyRPass { compiler } |
-            Source::CheckPrettyRFail { compiler } |
-            Source::CheckRFail { compiler } |
-            Source::CheckPFail { compiler } |
-            Source::CheckCodegen { compiler } |
-            Source::CheckCodegenUnits { compiler } |
-            Source::CheckIncremental { compiler } |
-            Source::CheckUi { compiler } |
-            Source::CheckRustdoc { compiler } |
-            Source::CheckPretty { compiler } |
-            Source::CheckCFail { compiler } |
-            Source::CheckRPassValgrind { compiler } |
-            Source::CheckRPass { compiler } => {
-                let mut base = vec![
-                    self.libtest(compiler),
-                    self.target(compiler.host).tool_compiletest(compiler.stage),
-                    self.test_helpers(()),
-                ];
-                if self.target.contains("android") {
-                    base.push(self.android_copy_libs(compiler));
-                }
-                base
-            }
-            Source::CheckDebuginfo { compiler } => {
-                vec![
-                    self.libtest(compiler),
-                    self.target(compiler.host).tool_compiletest(compiler.stage),
-                    self.test_helpers(()),
-                    self.debugger_scripts(compiler.stage),
-                ]
-            }
-            Source::CheckRPassFull { compiler } |
-            Source::CheckRFailFull { compiler } |
-            Source::CheckCFailFull { compiler } |
-            Source::CheckPrettyRPassFull { compiler } |
-            Source::CheckPrettyRFailFull { compiler } |
-            Source::CheckPrettyRPassValgrind { compiler } |
-            Source::CheckRMake { compiler } => {
-                vec![self.librustc(compiler),
-                     self.target(compiler.host).tool_compiletest(compiler.stage)]
-            }
-            Source::CheckDocs { compiler } => {
-                vec![self.libstd(compiler)]
-            }
-            Source::CheckErrorIndex { compiler } => {
-                vec![self.libstd(compiler),
-                     self.target(compiler.host).tool_error_index(compiler.stage)]
-            }
-            Source::CheckCrateStd { compiler } => {
-                vec![self.libtest(compiler)]
-            }
-            Source::CheckCrateTest { compiler } => {
-                vec![self.libtest(compiler)]
-            }
-            Source::CheckCrateRustc { compiler } => {
-                vec![self.libtest(compiler)]
-            }
-
-            Source::ToolLinkchecker { stage } |
-            Source::ToolTidy { stage } => {
-                vec![self.libstd(self.compiler(stage))]
-            }
-            Source::ToolErrorIndex { stage } |
-            Source::ToolRustbook { stage } => {
-                vec![self.librustc(self.compiler(stage))]
-            }
-            Source::ToolCargoTest { stage } => {
-                vec![self.libstd(self.compiler(stage))]
-            }
-            Source::ToolCompiletest { stage } => {
-                vec![self.libtest(self.compiler(stage))]
-            }
-
-            Source::DistDocs { stage } => vec![self.doc(stage)],
-            Source::DistMingw { _dummy: _ } => Vec::new(),
-            Source::DistRustc { stage } => {
-                vec![self.rustc(stage)]
-            }
-            Source::DistStd { compiler } => {
-                // We want to package up as many target libraries as possible
-                // for the `rust-std` package, so if this is a host target we
-                // depend on librustc and otherwise we just depend on libtest.
-                if build.config.host.iter().any(|t| t == self.target) {
-                    vec![self.librustc(compiler)]
-                } else {
-                    vec![self.libtest(compiler)]
-                }
-            }
-
-            Source::Dist { stage } => {
-                let mut base = Vec::new();
-
-                for host in build.config.host.iter() {
-                    let host = self.target(host);
-                    base.push(host.dist_rustc(stage));
-                    if host.target.contains("windows-gnu") {
-                        base.push(host.dist_mingw(()));
-                    }
-
-                    let compiler = self.compiler(stage);
-                    for target in build.config.target.iter() {
-                        let target = self.target(target);
-                        if build.config.docs {
-                            base.push(target.dist_docs(stage));
-                        }
-                        base.push(target.dist_std(compiler));
-                    }
-                }
-                return base
-            }
-
-            Source::AndroidCopyLibs { compiler } => {
-                vec![self.libtest(compiler)]
-            }
-        }
-    }
-}
diff --git a/src/bootstrap/build/util.rs b/src/bootstrap/build/util.rs
deleted file mode 100644 (file)
index 36ce064..0000000
+++ /dev/null
@@ -1,123 +0,0 @@
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Various utility functions used throughout rustbuild.
-//!
-//! Simple things like testing the various filesystem operations here and there,
-//! not a lot of interesting happenings here unfortunately.
-
-use std::env;
-use std::path::{Path, PathBuf};
-use std::fs;
-use std::process::Command;
-
-use bootstrap::{dylib_path, dylib_path_var};
-use filetime::FileTime;
-
-/// Returns the `name` as the filename of a static library for `target`.
-pub fn staticlib(name: &str, target: &str) -> String {
-    if target.contains("windows-msvc") {
-        format!("{}.lib", name)
-    } else {
-        format!("lib{}.a", name)
-    }
-}
-
-/// Returns the last-modified time for `path`, or zero if it doesn't exist.
-pub fn mtime(path: &Path) -> FileTime {
-    fs::metadata(path).map(|f| {
-        FileTime::from_last_modification_time(&f)
-    }).unwrap_or(FileTime::zero())
-}
-
-/// Copies a file from `src` to `dst`, attempting to use hard links and then
-/// falling back to an actual filesystem copy if necessary.
-pub fn copy(src: &Path, dst: &Path) {
-    let res = fs::hard_link(src, dst);
-    let res = res.or_else(|_| fs::copy(src, dst).map(|_| ()));
-    if let Err(e) = res {
-        panic!("failed to copy `{}` to `{}`: {}", src.display(),
-               dst.display(), e)
-    }
-}
-
-/// Copies the `src` directory recursively to `dst`. Both are assumed to exist
-/// when this function is called.
-pub fn cp_r(src: &Path, dst: &Path) {
-    for f in t!(fs::read_dir(src)) {
-        let f = t!(f);
-        let path = f.path();
-        let name = path.file_name().unwrap();
-        let dst = dst.join(name);
-        if t!(f.file_type()).is_dir() {
-            let _ = fs::remove_dir_all(&dst);
-            t!(fs::create_dir(&dst));
-            cp_r(&path, &dst);
-        } else {
-            let _ = fs::remove_file(&dst);
-            copy(&path, &dst);
-        }
-    }
-}
-
-/// Given an executable called `name`, return the filename for the
-/// executable for a particular target.
-pub fn exe(name: &str, target: &str) -> String {
-    if target.contains("windows") {
-        format!("{}.exe", name)
-    } else {
-        name.to_string()
-    }
-}
-
-/// Returns whether the file name given looks like a dynamic library.
-pub fn is_dylib(name: &str) -> bool {
-    name.ends_with(".dylib") || name.ends_with(".so") || name.ends_with(".dll")
-}
-
-/// Returns the corresponding relative library directory that the compiler's
-/// dylibs will be found in.
-pub fn libdir(target: &str) -> &'static str {
-    if target.contains("windows") {"bin"} else {"lib"}
-}
-
-/// Adds a list of lookup paths to `cmd`'s dynamic library lookup path.
-pub fn add_lib_path(path: Vec<PathBuf>, cmd: &mut Command) {
-    let mut list = dylib_path();
-    for path in path {
-        list.insert(0, path);
-    }
-    cmd.env(dylib_path_var(), t!(env::join_paths(list)));
-}
-
-/// Returns whether `dst` is up to date given that the file or files in `src`
-/// are used to generate it.
-///
-/// Uses last-modified time checks to verify this.
-pub fn up_to_date(src: &Path, dst: &Path) -> bool {
-    let threshold = mtime(dst);
-    let meta = t!(fs::metadata(src));
-    if meta.is_dir() {
-        dir_up_to_date(src, &threshold)
-    } else {
-        FileTime::from_last_modification_time(&meta) <= threshold
-    }
-}
-
-fn dir_up_to_date(src: &Path, threshold: &FileTime) -> bool {
-    t!(fs::read_dir(src)).map(|e| t!(e)).all(|e| {
-        let meta = t!(e.metadata());
-        if meta.is_dir() {
-            dir_up_to_date(&e.path(), threshold)
-        } else {
-            FileTime::from_last_modification_time(&meta) < *threshold
-        }
-    })
-}
diff --git a/src/bootstrap/cc.rs b/src/bootstrap/cc.rs
new file mode 100644 (file)
index 0000000..e2bde4a
--- /dev/null
@@ -0,0 +1,124 @@
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! C-compiler probing and detection.
+//!
+//! This module will fill out the `cc` and `cxx` maps of `Build` by looking for
+//! C and C++ compilers for each target configured. A compiler is found through
+//! a number of vectors (in order of precedence):
+//!
+//! 1. Configuration via `target.$target.cc` in `config.toml`.
+//! 2. Configuration via `target.$target.android-ndk` in `config.toml`, if
+//!    applicable
+//! 3. Special logic to probe on OpenBSD
+//! 4. The `CC_$target` environment variable.
+//! 5. The `CC` environment variable.
+//! 6. "cc"
+//!
+//! Some of this logic is implemented here, but much of it is farmed out to the
+//! `gcc` crate itself, so we end up having the same fallbacks as there.
+//! Similar logic is then used to find a C++ compiler, just some s/cc/c++/ is
+//! used.
+//!
+//! It is intended that after this module has run no C/C++ compiler will
+//! ever be probed for. Instead the compilers found here will be used for
+//! everything.
+
+use std::process::Command;
+
+use build_helper::{cc2ar, output};
+use gcc;
+
+use Build;
+use config::Target;
+
+pub fn find(build: &mut Build) {
+    // For all targets we're going to need a C compiler for building some shims
+    // and such as well as for being a linker for Rust code.
+    for target in build.config.target.iter() {
+        let mut cfg = gcc::Config::new();
+        cfg.cargo_metadata(false).opt_level(0).debug(false)
+           .target(target).host(&build.config.build);
+
+        let config = build.config.target_config.get(target);
+        if let Some(cc) = config.and_then(|c| c.cc.as_ref()) {
+            cfg.compiler(cc);
+        } else {
+            set_compiler(&mut cfg, "gcc", target, config);
+        }
+
+        let compiler = cfg.get_compiler();
+        let ar = cc2ar(compiler.path(), target);
+        build.verbose(&format!("CC_{} = {:?}", target, compiler.path()));
+        if let Some(ref ar) = ar {
+            build.verbose(&format!("AR_{} = {:?}", target, ar));
+        }
+        build.cc.insert(target.to_string(), (compiler, ar));
+    }
+
+    // For all host triples we need to find a C++ compiler as well
+    for host in build.config.host.iter() {
+        let mut cfg = gcc::Config::new();
+        cfg.cargo_metadata(false).opt_level(0).debug(false).cpp(true)
+           .target(host).host(&build.config.build);
+        let config = build.config.target_config.get(host);
+        if let Some(cxx) = config.and_then(|c| c.cxx.as_ref()) {
+            cfg.compiler(cxx);
+        } else {
+            set_compiler(&mut cfg, "g++", host, config);
+        }
+        let compiler = cfg.get_compiler();
+        build.verbose(&format!("CXX_{} = {:?}", host, compiler.path()));
+        build.cxx.insert(host.to_string(), compiler);
+    }
+}
+
+fn set_compiler(cfg: &mut gcc::Config,
+                gnu_compiler: &str,
+                target: &str,
+                config: Option<&Target>) {
+    match target {
+        // When compiling for android we may have the NDK configured in the
+        // config.toml in which case we look there. Otherwise the default
+        // compiler already takes into account the triple in question.
+        t if t.contains("android") => {
+            if let Some(ndk) = config.and_then(|c| c.ndk.as_ref()) {
+                let target = target.replace("armv7", "arm");
+                let compiler = format!("{}-{}", target, gnu_compiler);
+                cfg.compiler(ndk.join("bin").join(compiler));
+            }
+        }
+
+        // The default gcc version from OpenBSD may be too old, try using egcc,
+        // which is a gcc version from ports, if this is the case.
+        t if t.contains("openbsd") => {
+            let c = cfg.get_compiler();
+            if !c.path().ends_with(gnu_compiler) {
+                return
+            }
+
+            let output = output(c.to_command().arg("--version"));
+            let i = match output.find(" 4.") {
+                Some(i) => i,
+                None => return,
+            };
+            match output[i + 3..].chars().next().unwrap() {
+                '0' ... '6' => {}
+                _ => return,
+            }
+            let alternative = format!("e{}", gnu_compiler);
+            if Command::new(&alternative).output().is_ok() {
+                cfg.compiler(alternative);
+            }
+        }
+
+        _ => {}
+    }
+}
diff --git a/src/bootstrap/channel.rs b/src/bootstrap/channel.rs
new file mode 100644 (file)
index 0000000..879c383
--- /dev/null
@@ -0,0 +1,110 @@
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Build configuration for Rust's release channels.
+//!
+//! Implements the stable/beta/nightly channel distinctions by setting various
+//! flags like the `unstable_features`, calculating variables like `release` and
+//! `package_vers`, and otherwise indicating to the compiler what it should
+//! print out as part of its version information.
+
+use std::fs::{self, File};
+use std::io::prelude::*;
+use std::process::Command;
+
+use build_helper::output;
+use md5;
+
+use Build;
+
+pub fn collect(build: &mut Build) {
+    // Currently the canonical source for the release number (e.g. 1.10.0) and
+    // the prerelease version (e.g. `.1`) is in `mk/main.mk`. We "parse" that
+    // here to learn about those numbers.
+    let mut main_mk = String::new();
+    t!(t!(File::open(build.src.join("mk/main.mk"))).read_to_string(&mut main_mk));
+    let mut release_num = "";
+    let mut prerelease_version = "";
+    for line in main_mk.lines() {
+        if line.starts_with("CFG_RELEASE_NUM") {
+            release_num = line.split('=').skip(1).next().unwrap().trim();
+        }
+        if line.starts_with("CFG_PRERELEASE_VERSION") {
+            prerelease_version = line.split('=').skip(1).next().unwrap().trim();
+        }
+    }
+
+    // Depending on the channel, passed in `./configure --release-channel`,
+    // determine various properties of the build.
+    match &build.config.channel[..] {
+        "stable" => {
+            build.release = release_num.to_string();
+            build.package_vers = build.release.clone();
+            build.unstable_features = false;
+        }
+        "beta" => {
+            build.release = format!("{}-beta{}", release_num,
+                                   prerelease_version);
+            build.package_vers = "beta".to_string();
+            build.unstable_features = false;
+        }
+        "nightly" => {
+            build.release = format!("{}-nightly", release_num);
+            build.package_vers = "nightly".to_string();
+            build.unstable_features = true;
+        }
+        _ => {
+            build.release = format!("{}-dev", release_num);
+            build.package_vers = build.release.clone();
+            build.unstable_features = true;
+        }
+    }
+    build.version = build.release.clone();
+
+    // If we have a git directory, add in some various SHA information of what
+    // commit this compiler was compiled from.
+    if fs::metadata(build.src.join(".git")).is_ok() {
+        let ver_date = output(Command::new("git").current_dir(&build.src)
+                                      .arg("log").arg("-1")
+                                      .arg("--date=short")
+                                      .arg("--pretty=format:%cd"));
+        let ver_hash = output(Command::new("git").current_dir(&build.src)
+                                      .arg("rev-parse").arg("HEAD"));
+        let short_ver_hash = output(Command::new("git")
+                                            .current_dir(&build.src)
+                                            .arg("rev-parse")
+                                            .arg("--short=9")
+                                            .arg("HEAD"));
+        let ver_date = ver_date.trim().to_string();
+        let ver_hash = ver_hash.trim().to_string();
+        let short_ver_hash = short_ver_hash.trim().to_string();
+        build.version.push_str(&format!(" ({} {})", short_ver_hash,
+                                       ver_date));
+        build.ver_date = Some(ver_date.to_string());
+        build.ver_hash = Some(ver_hash);
+        build.short_ver_hash = Some(short_ver_hash);
+    }
+
+    // Calculate this compiler's bootstrap key, which is currently defined as
+    // the first 8 characters of the md5 of the release string.
+    let key = md5::compute(build.release.as_bytes());
+    build.bootstrap_key = format!("{:02x}{:02x}{:02x}{:02x}",
+                                  key[0], key[1], key[2], key[3]);
+
+    // Slurp up the stage0 bootstrap key as we're bootstrapping from an
+    // otherwise stable compiler.
+    let mut s = String::new();
+    t!(t!(File::open(build.src.join("src/stage0.txt"))).read_to_string(&mut s));
+    if let Some(line) = s.lines().find(|l| l.starts_with("rustc_key")) {
+        if let Some(key) = line.split(": ").nth(1) {
+            build.bootstrap_key_stage0 = key.to_string();
+        }
+    }
+}
diff --git a/src/bootstrap/check.rs b/src/bootstrap/check.rs
new file mode 100644 (file)
index 0000000..3d8b143
--- /dev/null
@@ -0,0 +1,413 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Implementation of the various `check-*` targets of the build system.
+//!
+//! This file implements the various regression test suites that we execute on
+//! our CI.
+
+use std::env;
+use std::fs::{self, File};
+use std::io::prelude::*;
+use std::path::{PathBuf, Path};
+use std::process::Command;
+
+use build_helper::output;
+
+use {Build, Compiler, Mode};
+use util::{self, dylib_path, dylib_path_var};
+
+const ADB_TEST_DIR: &'static str = "/data/tmp";
+
+/// Runs the `linkchecker` tool as compiled in `stage` by the `host` compiler.
+///
+/// This tool in `src/tools` will verify the validity of all our links in the
+/// documentation to ensure we don't have a bunch of dead ones.
+pub fn linkcheck(build: &Build, stage: u32, host: &str) {
+    println!("Linkcheck stage{} ({})", stage, host);
+    let compiler = Compiler::new(stage, host);
+    build.run(build.tool_cmd(&compiler, "linkchecker")
+                   .arg(build.out.join(host).join("doc")));
+}
+
+/// Runs the `cargotest` tool as compiled in `stage` by the `host` compiler.
+///
+/// This tool in `src/tools` will check out a few Rust projects and run `cargo
+/// test` to ensure that we don't regress the test suites there.
+pub fn cargotest(build: &Build, stage: u32, host: &str) {
+    let ref compiler = Compiler::new(stage, host);
+
+    // Configure PATH to find the right rustc. NB. we have to use PATH
+    // and not RUSTC because the Cargo test suite has tests that will
+    // fail if rustc is not spelled `rustc`.
+    let path = build.sysroot(compiler).join("bin");
+    let old_path = ::std::env::var("PATH").expect("");
+    let sep = if cfg!(windows) { ";" } else {":" };
+    let ref newpath = format!("{}{}{}", path.display(), sep, old_path);
+
+    // Note that this is a short, cryptic, and not scoped directory name. This
+    // is currently to minimize the length of path on Windows where we otherwise
+    // quickly run into path name limit constraints.
+    let out_dir = build.out.join("ct");
+    t!(fs::create_dir_all(&out_dir));
+
+    build.run(build.tool_cmd(compiler, "cargotest")
+                   .env("PATH", newpath)
+                   .arg(&build.cargo)
+                   .arg(&out_dir));
+}
+
+/// Runs the `tidy` tool as compiled in `stage` by the `host` compiler.
+///
+/// This tool in `src/tools` checks up on various bits and pieces of style and
+/// otherwise just implements a few lint-like checks that are specific to the
+/// compiler itself.
+pub fn tidy(build: &Build, stage: u32, host: &str) {
+    println!("tidy check stage{} ({})", stage, host);
+    let compiler = Compiler::new(stage, host);
+    build.run(build.tool_cmd(&compiler, "tidy")
+                   .arg(build.src.join("src")));
+}
+
+fn testdir(build: &Build, host: &str) -> PathBuf {
+    build.out.join(host).join("test")
+}
+
+/// Executes the `compiletest` tool to run a suite of tests.
+///
+/// Compiles all tests with `compiler` for `target` with the specified
+/// compiletest `mode` and `suite` arguments. For example `mode` can be
+/// "run-pass" or `suite` can be something like `debuginfo`.
+pub fn compiletest(build: &Build,
+                   compiler: &Compiler,
+                   target: &str,
+                   mode: &str,
+                   suite: &str) {
+    println!("Check compiletest {} ({} -> {})", suite, compiler.host, target);
+    let mut cmd = build.tool_cmd(compiler, "compiletest");
+
+    // compiletest currently has... a lot of arguments, so let's just pass all
+    // of them!
+
+    cmd.arg("--compile-lib-path").arg(build.rustc_libdir(compiler));
+    cmd.arg("--run-lib-path").arg(build.sysroot_libdir(compiler, target));
+    cmd.arg("--rustc-path").arg(build.compiler_path(compiler));
+    cmd.arg("--rustdoc-path").arg(build.rustdoc(compiler));
+    cmd.arg("--src-base").arg(build.src.join("src/test").join(suite));
+    cmd.arg("--build-base").arg(testdir(build, compiler.host).join(suite));
+    cmd.arg("--stage-id").arg(format!("stage{}-{}", compiler.stage, target));
+    cmd.arg("--mode").arg(mode);
+    cmd.arg("--target").arg(target);
+    cmd.arg("--host").arg(compiler.host);
+    cmd.arg("--llvm-filecheck").arg(build.llvm_filecheck(&build.config.build));
+
+    let mut flags = vec!["-Crpath".to_string()];
+    if build.config.rust_optimize_tests {
+        flags.push("-O".to_string());
+    }
+    if build.config.rust_debuginfo_tests {
+        flags.push("-g".to_string());
+    }
+
+    let mut hostflags = build.rustc_flags(&compiler.host);
+    hostflags.extend(flags.clone());
+    cmd.arg("--host-rustcflags").arg(hostflags.join(" "));
+
+    let mut targetflags = build.rustc_flags(&target);
+    targetflags.extend(flags);
+    targetflags.push(format!("-Lnative={}",
+                             build.test_helpers_out(target).display()));
+    cmd.arg("--target-rustcflags").arg(targetflags.join(" "));
+
+    // FIXME: CFG_PYTHON should probably be detected more robustly elsewhere
+    let python_default = "python";
+    cmd.arg("--docck-python").arg(python_default);
+
+    if build.config.build.ends_with("apple-darwin") {
+        // Force /usr/bin/python on OSX for LLDB tests because we're loading the
+        // LLDB plugin's compiled module which only works with the system python
+        // (namely not Homebrew-installed python)
+        cmd.arg("--lldb-python").arg("/usr/bin/python");
+    } else {
+        cmd.arg("--lldb-python").arg(python_default);
+    }
+
+    if let Some(ref vers) = build.gdb_version {
+        cmd.arg("--gdb-version").arg(vers);
+    }
+    if let Some(ref vers) = build.lldb_version {
+        cmd.arg("--lldb-version").arg(vers);
+    }
+    if let Some(ref dir) = build.lldb_python_dir {
+        cmd.arg("--lldb-python-dir").arg(dir);
+    }
+
+    cmd.args(&build.flags.args);
+
+    if build.config.verbose || build.flags.verbose {
+        cmd.arg("--verbose");
+    }
+
+    // Only pass correct values for these flags for the `run-make` suite as it
+    // requires that a C++ compiler was configured which isn't always the case.
+    if suite == "run-make" {
+        let llvm_config = build.llvm_config(target);
+        let llvm_components = output(Command::new(&llvm_config).arg("--components"));
+        let llvm_cxxflags = output(Command::new(&llvm_config).arg("--cxxflags"));
+        cmd.arg("--cc").arg(build.cc(target))
+           .arg("--cxx").arg(build.cxx(target))
+           .arg("--cflags").arg(build.cflags(target).join(" "))
+           .arg("--llvm-components").arg(llvm_components.trim())
+           .arg("--llvm-cxxflags").arg(llvm_cxxflags.trim());
+    } else {
+        cmd.arg("--cc").arg("")
+           .arg("--cxx").arg("")
+           .arg("--cflags").arg("")
+           .arg("--llvm-components").arg("")
+           .arg("--llvm-cxxflags").arg("");
+    }
+
+    // Running a C compiler on MSVC requires a few env vars to be set, so
+    // be sure to set them here.
+    if target.contains("msvc") {
+        for &(ref k, ref v) in build.cc[target].0.env() {
+            if k != "PATH" {
+                cmd.env(k, v);
+            }
+        }
+    }
+    build.add_bootstrap_key(compiler, &mut cmd);
+
+    cmd.arg("--adb-path").arg("adb");
+    cmd.arg("--adb-test-dir").arg(ADB_TEST_DIR);
+    if target.contains("android") {
+        // Assume that cc for this target comes from the android sysroot
+        cmd.arg("--android-cross-path")
+           .arg(build.cc(target).parent().unwrap().parent().unwrap());
+    } else {
+        cmd.arg("--android-cross-path").arg("");
+    }
+
+    build.run(&mut cmd);
+}
+
+/// Run `rustdoc --test` for all documentation in `src/doc`.
+///
+/// This will run all tests in our markdown documentation (e.g. the book)
+/// located in `src/doc`. The `rustdoc` that's run is the one that sits next to
+/// `compiler`.
+pub fn docs(build: &Build, compiler: &Compiler) {
+    // Do a breadth-first traversal of the `src/doc` directory and just run
+    // tests for all files that end in `*.md`
+    let mut stack = vec![build.src.join("src/doc")];
+
+    while let Some(p) = stack.pop() {
+        if p.is_dir() {
+            stack.extend(t!(p.read_dir()).map(|p| t!(p).path()));
+            continue
+        }
+
+        if p.extension().and_then(|s| s.to_str()) != Some("md") {
+            continue
+        }
+
+        println!("doc tests for: {}", p.display());
+        markdown_test(build, compiler, &p);
+    }
+}
+
+/// Run the error index generator tool to execute the tests located in the error
+/// index.
+///
+/// The `error_index_generator` tool lives in `src/tools` and is used to
+/// generate a markdown file from the error indexes of the code base which is
+/// then passed to `rustdoc --test`.
+pub fn error_index(build: &Build, compiler: &Compiler) {
+    println!("Testing error-index stage{}", compiler.stage);
+
+    let output = testdir(build, compiler.host).join("error-index.md");
+    build.run(build.tool_cmd(compiler, "error_index_generator")
+                   .arg("markdown")
+                   .arg(&output)
+                   .env("CFG_BUILD", &build.config.build));
+
+    markdown_test(build, compiler, &output);
+}
+
+fn markdown_test(build: &Build, compiler: &Compiler, markdown: &Path) {
+    let mut cmd = Command::new(build.rustdoc(compiler));
+    build.add_rustc_lib_path(compiler, &mut cmd);
+    cmd.arg("--test");
+    cmd.arg(markdown);
+    cmd.arg("--test-args").arg(build.flags.args.join(" "));
+    build.run(&mut cmd);
+}
+
+/// Run all unit tests plus documentation tests for an entire crate DAG defined
+/// by a `Cargo.toml`
+///
+/// This is what runs tests for crates like the standard library, compiler, etc.
+/// It essentially is the driver for running `cargo test`.
+///
+/// Currently this runs all tests for a DAG by passing a bunch of `-p foo`
+/// arguments, and those arguments are discovered from `Cargo.lock`.
+pub fn krate(build: &Build,
+             compiler: &Compiler,
+             target: &str,
+             mode: Mode) {
+    let (name, path, features) = match mode {
+        Mode::Libstd => ("libstd", "src/rustc/std_shim", build.std_features()),
+        Mode::Libtest => ("libtest", "src/rustc/test_shim", String::new()),
+        Mode::Librustc => ("librustc", "src/rustc", build.rustc_features()),
+        _ => panic!("can only test libraries"),
+    };
+    println!("Testing {} stage{} ({} -> {})", name, compiler.stage,
+             compiler.host, target);
+
+    // Build up the base `cargo test` command.
+    let mut cargo = build.cargo(compiler, mode, target, "test");
+    cargo.arg("--manifest-path")
+         .arg(build.src.join(path).join("Cargo.toml"))
+         .arg("--features").arg(features);
+
+    // Generate a list of `-p` arguments to pass to the `cargo test` invocation
+    // by crawling the corresponding Cargo.lock file.
+    let lockfile = build.src.join(path).join("Cargo.lock");
+    let mut contents = String::new();
+    t!(t!(File::open(&lockfile)).read_to_string(&mut contents));
+    let mut lines = contents.lines();
+    while let Some(line) = lines.next() {
+        let prefix = "name = \"";
+        if !line.starts_with(prefix) {
+            continue
+        }
+        lines.next(); // skip `version = ...`
+
+        // skip crates.io or otherwise non-path crates
+        if let Some(line) = lines.next() {
+            if line.starts_with("source") {
+                continue
+            }
+        }
+
+        let crate_name = &line[prefix.len()..line.len() - 1];
+
+        // Right now jemalloc is our only target-specific crate in the sense
+        // that it's not present on all platforms. Custom skip it here for now,
+        // but if we add more this probably wants to get more generalized.
+        if crate_name.contains("jemalloc") {
+            continue
+        }
+
+        cargo.arg("-p").arg(crate_name);
+    }
+
+    // The tests are going to run with the *target* libraries, so we need to
+    // ensure that those libraries show up in the LD_LIBRARY_PATH equivalent.
+    //
+    // Note that to run the compiler we need to run with the *host* libraries,
+    // but our wrapper scripts arrange for that to be the case anyway.
+    let mut dylib_path = dylib_path();
+    dylib_path.insert(0, build.sysroot_libdir(compiler, target));
+    cargo.env(dylib_path_var(), env::join_paths(&dylib_path).unwrap());
+
+    if target.contains("android") {
+        build.run(cargo.arg("--no-run"));
+        krate_android(build, compiler, target, mode);
+    } else {
+        cargo.args(&build.flags.args);
+        build.run(&mut cargo);
+    }
+}
+
+fn krate_android(build: &Build,
+                 compiler: &Compiler,
+                 target: &str,
+                 mode: Mode) {
+    let mut tests = Vec::new();
+    let out_dir = build.cargo_out(compiler, mode, target);
+    find_tests(&out_dir, target, &mut tests);
+    find_tests(&out_dir.join("deps"), target, &mut tests);
+
+    for test in tests {
+        build.run(Command::new("adb").arg("push").arg(&test).arg(ADB_TEST_DIR));
+
+        let test_file_name = test.file_name().unwrap().to_string_lossy();
+        let log = format!("{}/check-stage{}-T-{}-H-{}-{}.log",
+                          ADB_TEST_DIR,
+                          compiler.stage,
+                          target,
+                          compiler.host,
+                          test_file_name);
+        let program = format!("(cd {dir}; \
+                                LD_LIBRARY_PATH=./{target} ./{test} \
+                                    --logfile {log} \
+                                    {args})",
+                              dir = ADB_TEST_DIR,
+                              target = target,
+                              test = test_file_name,
+                              log = log,
+                              args = build.flags.args.join(" "));
+
+        let output = output(Command::new("adb").arg("shell").arg(&program));
+        println!("{}", output);
+        build.run(Command::new("adb")
+                          .arg("pull")
+                          .arg(&log)
+                          .arg(build.out.join("tmp")));
+        build.run(Command::new("adb").arg("shell").arg("rm").arg(&log));
+        if !output.contains("result: ok") {
+            panic!("some tests failed");
+        }
+    }
+}
+
+fn find_tests(dir: &Path,
+              target: &str,
+              dst: &mut Vec<PathBuf>) {
+    for e in t!(dir.read_dir()).map(|e| t!(e)) {
+        let file_type = t!(e.file_type());
+        if !file_type.is_file() {
+            continue
+        }
+        let filename = e.file_name().into_string().unwrap();
+        if (target.contains("windows") && filename.ends_with(".exe")) ||
+           (!target.contains("windows") && !filename.contains(".")) {
+            dst.push(e.path());
+        }
+    }
+}
+
+pub fn android_copy_libs(build: &Build,
+                         compiler: &Compiler,
+                         target: &str) {
+    println!("Android copy libs to emulator ({})", target);
+    build.run(Command::new("adb").arg("remount"));
+    build.run(Command::new("adb").args(&["shell", "rm", "-r", ADB_TEST_DIR]));
+    build.run(Command::new("adb").args(&["shell", "mkdir", ADB_TEST_DIR]));
+    build.run(Command::new("adb")
+                      .arg("push")
+                      .arg(build.src.join("src/etc/adb_run_wrapper.sh"))
+                      .arg(ADB_TEST_DIR));
+
+    let target_dir = format!("{}/{}", ADB_TEST_DIR, target);
+    build.run(Command::new("adb").args(&["shell", "mkdir", &target_dir[..]]));
+
+    for f in t!(build.sysroot_libdir(compiler, target).read_dir()) {
+        let f = t!(f);
+        let name = f.file_name().into_string().unwrap();
+        if util::is_dylib(&name) {
+            build.run(Command::new("adb")
+                              .arg("push")
+                              .arg(f.path())
+                              .arg(&target_dir));
+        }
+    }
+}
diff --git a/src/bootstrap/clean.rs b/src/bootstrap/clean.rs
new file mode 100644 (file)
index 0000000..a466e2e
--- /dev/null
@@ -0,0 +1,49 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Implementation of `make clean` in rustbuild.
+//!
+//! Responsible for cleaning out a build directory of all old and stale
+//! artifacts to prepare for a fresh build. Currently doesn't remove the
+//! `build/cache` directory (download cache) or the `build/$target/llvm`
+//! directory as we want that cached between builds.
+
+use std::fs;
+use std::path::Path;
+
+use Build;
+
+/// Entry point for `make clean`: deletes the temporary directories and all
+/// per-host staging output, leaving the download cache and LLVM build
+/// directories in place (see the module docs above).
+pub fn clean(build: &Build) {
+    // Scratch space: the relative `tmp` dir and the one in the build output.
+    rm_rf(build, "tmp".as_ref());
+    rm_rf(build, &build.out.join("tmp"));
+
+    for host in build.config.host.iter() {
+
+        let out = build.out.join(host);
+
+        rm_rf(build, &out.join("compiler-rt"));
+        rm_rf(build, &out.join("doc"));
+
+        // Remove the sysroots and Cargo output for every possible stage.
+        for stage in 0..4 {
+            rm_rf(build, &out.join(format!("stage{}", stage)));
+            rm_rf(build, &out.join(format!("stage{}-std", stage)));
+            rm_rf(build, &out.join(format!("stage{}-rustc", stage)));
+            rm_rf(build, &out.join(format!("stage{}-tools", stage)));
+            rm_rf(build, &out.join(format!("stage{}-test", stage)));
+        }
+    }
+}
+
+/// Recursively delete `path` if it exists, logging the removal when the
+/// build runs in verbose mode.
+///
+/// NOTE(review): `fs::remove_dir_all` fails on plain files; callers are
+/// expected to only pass directories here — confirm if new call sites appear.
+fn rm_rf(build: &Build, path: &Path) {
+    if path.exists() {
+        build.verbose(&format!("removing `{}`", path.display()));
+        t!(fs::remove_dir_all(path));
+    }
+}
diff --git a/src/bootstrap/compile.rs b/src/bootstrap/compile.rs
new file mode 100644 (file)
index 0000000..8ec9c7f
--- /dev/null
@@ -0,0 +1,360 @@
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Implementation of compiling various phases of the compiler and standard
+//! library.
+//!
+//! This module contains some of the real meat in the rustbuild build system
+//! which is where Cargo is used to compiler the standard library, libtest, and
+//! compiler. This module is also responsible for assembling the sysroot as it
+//! goes along from the output of the previous stage.
+
+use std::collections::HashMap;
+use std::fs;
+use std::path::{Path, PathBuf};
+use std::process::Command;
+
+use build_helper::output;
+
+use util::{exe, staticlib, libdir, mtime, is_dylib, copy};
+use {Build, Compiler, Mode};
+
+/// Build the standard library.
+///
+/// This will build the standard library for a particular stage of the build
+/// using the `compiler` targeting the `target` architecture. The artifacts
+/// created will also be linked into the sysroot directory.
+pub fn std<'a>(build: &'a Build, target: &str, compiler: &Compiler<'a>) {
+    println!("Building stage{} std artifacts ({} -> {})", compiler.stage,
+             compiler.host, target);
+
+    // Move compiler-rt into place as it'll be required by the compiler when
+    // building the standard library to link the dylib of libstd
+    let libdir = build.sysroot_libdir(compiler, target);
+    let _ = fs::remove_dir_all(&libdir);
+    t!(fs::create_dir_all(&libdir));
+    copy(&build.compiler_rt_built.borrow()[target],
+         &libdir.join(staticlib("compiler-rt", target)));
+
+    // Some platforms have startup objects that may be required to produce the
+    // libstd dynamic library, for example.
+    build_startup_objects(build, target, &libdir);
+
+    // Clear out stale Cargo output if the compiler producing it has changed.
+    let out_dir = build.cargo_out(compiler, Mode::Libstd, target);
+    build.clear_if_dirty(&out_dir, &build.compiler_path(compiler));
+    let mut cargo = build.cargo(compiler, Mode::Libstd, target, "build");
+    cargo.arg("--features").arg(build.std_features())
+         .arg("--manifest-path")
+         .arg(build.src.join("src/rustc/std_shim/Cargo.toml"));
+
+    // Optional per-target jemalloc override, plus the musl root for
+    // musl-based targets.
+    if let Some(target) = build.config.target_config.get(target) {
+        if let Some(ref jemalloc) = target.jemalloc {
+            cargo.env("JEMALLOC_OVERRIDE", jemalloc);
+        }
+    }
+    if let Some(ref p) = build.config.musl_root {
+        if target.contains("musl") {
+            cargo.env("MUSL_ROOT", p);
+        }
+    }
+
+    build.run(&mut cargo);
+    // Link what was just built into this same compiler's sysroot.
+    std_link(build, target, compiler, compiler.host);
+}
+
+/// Link all libstd rlibs/dylibs into the sysroot location.
+///
+/// Links those artifacts generated in the given `stage` for `target` produced
+/// by `compiler` into `host`'s sysroot.
+pub fn std_link(build: &Build,
+                target: &str,
+                compiler: &Compiler,
+                host: &str) {
+    let target_compiler = Compiler::new(compiler.stage, host);
+    let libdir = build.sysroot_libdir(&target_compiler, target);
+    let out_dir = build.cargo_out(compiler, Mode::Libstd, target);
+
+    // If we're linking one compiler host's output into another, then we weren't
+    // called from the `std` method above. In that case we clean out what's
+    // already there and then also link compiler-rt into place.
+    if host != compiler.host {
+        let _ = fs::remove_dir_all(&libdir);
+        t!(fs::create_dir_all(&libdir));
+        copy(&build.compiler_rt_built.borrow()[target],
+             &libdir.join(staticlib("compiler-rt", target)));
+    }
+    add_to_sysroot(&out_dir, &libdir);
+
+    // Statically-linked musl targets additionally need the crt*.o startup
+    // objects in the sysroot (see `copy_third_party_objects`).
+    if target.contains("musl") &&
+       (target.contains("x86_64") || target.contains("i686")) {
+        copy_third_party_objects(build, target, &libdir);
+    }
+}
+
+/// Copies the crt(1,i,n).o startup objects
+///
+/// Only required for musl targets that statically link to libc
+fn copy_third_party_objects(build: &Build, target: &str, into: &Path) {
+    // Locate each object via the target's C compiler (`-print-file-name`,
+    // see `compiler_file`) and copy it into the sysroot libdir.
+    for &obj in &["crt1.o", "crti.o", "crtn.o"] {
+        copy(&compiler_file(build.cc(target), obj), &into.join(obj));
+    }
+}
+
+/// Build and prepare startup objects like rsbegin.o and rsend.o
+///
+/// These are primarily used on Windows right now for linking executables/dlls.
+/// They don't require any library support as they're just plain old object
+/// files, so we just use the nightly snapshot compiler to always build them (as
+/// no other compilers are guaranteed to be available).
+fn build_startup_objects(build: &Build, target: &str, into: &Path) {
+    // Startup objects are only needed for the *-pc-windows-gnu targets.
+    if !target.contains("pc-windows-gnu") {
+        return
+    }
+    // Use the stage0 snapshot compiler of the build triple; see the doc
+    // comment above for why no newer compiler is required.
+    let compiler = Compiler::new(0, &build.config.build);
+    let compiler = build.compiler_path(&compiler);
+
+    // Compile each src/rtstartup source straight to an object file in the
+    // destination directory.
+    for file in t!(fs::read_dir(build.src.join("src/rtstartup"))) {
+        let file = t!(file);
+        build.run(Command::new(&compiler)
+                          .arg("--emit=obj")
+                          .arg("--out-dir").arg(into)
+                          .arg(file.path()));
+    }
+
+    // Also copy the C runtime objects supplied by the target's C toolchain.
+    for obj in ["crt2.o", "dllcrt2.o"].iter() {
+        copy(&compiler_file(build.cc(target), obj), &into.join(obj));
+    }
+}
+
+/// Build libtest.
+///
+/// This will build libtest and supporting libraries for a particular stage of
+/// the build using the `compiler` targeting the `target` architecture. The
+/// artifacts created will also be linked into the sysroot directory.
+pub fn test<'a>(build: &'a Build, target: &str, compiler: &Compiler<'a>) {
+    println!("Building stage{} test artifacts ({} -> {})", compiler.stage,
+             compiler.host, target);
+    let out_dir = build.cargo_out(compiler, Mode::Libtest, target);
+    // Wipe stale output if libstd (which libtest builds against) changed.
+    build.clear_if_dirty(&out_dir, &libstd_shim(build, compiler, target));
+    let mut cargo = build.cargo(compiler, Mode::Libtest, target, "build");
+    cargo.arg("--manifest-path")
+         .arg(build.src.join("src/rustc/test_shim/Cargo.toml"));
+    build.run(&mut cargo);
+    // Link the result into this same compiler's sysroot.
+    test_link(build, target, compiler, compiler.host);
+}
+
+/// Link all libtest rlibs/dylibs into the sysroot location.
+///
+/// Links those artifacts generated in the given `stage` for `target` produced
+/// by `compiler` into `host`'s sysroot.
+pub fn test_link(build: &Build,
+                 target: &str,
+                 compiler: &Compiler,
+                 host: &str) {
+    let target_compiler = Compiler::new(compiler.stage, host);
+    let libdir = build.sysroot_libdir(&target_compiler, target);
+    let out_dir = build.cargo_out(compiler, Mode::Libtest, target);
+    // Copy the freshly built rlibs/dylibs into `host`'s sysroot.
+    add_to_sysroot(&out_dir, &libdir);
+}
+
+/// Build the compiler.
+///
+/// This will build the compiler for a particular stage of the build using
+/// the `compiler` targeting the `target` architecture. The artifacts
+/// created will also be linked into the sysroot directory.
+pub fn rustc<'a>(build: &'a Build, target: &str, compiler: &Compiler<'a>) {
+    println!("Building stage{} compiler artifacts ({} -> {})",
+             compiler.stage, compiler.host, target);
+
+    // Wipe stale Cargo output if libtest (which rustc builds on top of)
+    // changed since the last build.
+    let out_dir = build.cargo_out(compiler, Mode::Librustc, target);
+    build.clear_if_dirty(&out_dir, &libtest_shim(build, compiler, target));
+
+    let mut cargo = build.cargo(compiler, Mode::Librustc, target, "build");
+    cargo.arg("--features").arg(build.rustc_features())
+         .arg("--manifest-path")
+         .arg(build.src.join("src/rustc/Cargo.toml"));
+
+    // Set some configuration variables picked up by build scripts and
+    // the compiler alike
+    cargo.env("CFG_RELEASE", &build.release)
+         .env("CFG_RELEASE_CHANNEL", &build.config.channel)
+         .env("CFG_VERSION", &build.version)
+         .env("CFG_BOOTSTRAP_KEY", &build.bootstrap_key)
+         .env("CFG_PREFIX", build.config.prefix.clone().unwrap_or(String::new()))
+         .env("CFG_LIBDIR_RELATIVE", "lib");
+
+    // Version-control metadata, when it could be determined.
+    if let Some(ref ver_date) = build.ver_date {
+        cargo.env("CFG_VER_DATE", ver_date);
+    }
+    if let Some(ref ver_hash) = build.ver_hash {
+        cargo.env("CFG_VER_HASH", ver_hash);
+    }
+    if !build.unstable_features {
+        cargo.env("CFG_DISABLE_UNSTABLE_FEATURES", "1");
+    }
+    // Point the build at this target's LLVM, optionally linking libstdc++
+    // statically.
+    cargo.env("LLVM_CONFIG", build.llvm_config(target));
+    if build.config.llvm_static_stdcpp {
+        cargo.env("LLVM_STATIC_STDCPP",
+                  compiler_file(build.cxx(target), "libstdc++.a"));
+    }
+    // Pass through any configured linker/archiver defaults.
+    if let Some(ref s) = build.config.rustc_default_linker {
+        cargo.env("CFG_DEFAULT_LINKER", s);
+    }
+    if let Some(ref s) = build.config.rustc_default_ar {
+        cargo.env("CFG_DEFAULT_AR", s);
+    }
+    build.run(&mut cargo);
+
+    rustc_link(build, target, compiler, compiler.host);
+}
+
+/// Link all librustc rlibs/dylibs into the sysroot location.
+///
+/// Links those artifacts generated in the given `stage` for `target` produced
+/// by `compiler` into `host`'s sysroot.
+pub fn rustc_link(build: &Build,
+                  target: &str,
+                  compiler: &Compiler,
+                  host: &str) {
+    let target_compiler = Compiler::new(compiler.stage, host);
+    let libdir = build.sysroot_libdir(&target_compiler, target);
+    let out_dir = build.cargo_out(compiler, Mode::Librustc, target);
+    // Copy the freshly built rlibs/dylibs into `host`'s sysroot.
+    add_to_sysroot(&out_dir, &libdir);
+}
+
+/// Cargo's output path for the standard library in a given stage, compiled
+/// by a particular compiler for the specified target.
+///
+/// Also serves as the freshness stamp that libtest builds are checked
+/// against (see `test` above).
+fn libstd_shim(build: &Build, compiler: &Compiler, target: &str) -> PathBuf {
+    build.cargo_out(compiler, Mode::Libstd, target).join("libstd_shim.rlib")
+}
+
+/// Cargo's output path for libtest in a given stage, compiled by a particular
+/// compiler for the specified target.
+///
+/// Also serves as the freshness stamp that rustc builds are checked against
+/// (see `rustc` above).
+fn libtest_shim(build: &Build, compiler: &Compiler, target: &str) -> PathBuf {
+    build.cargo_out(compiler, Mode::Libtest, target).join("libtest_shim.rlib")
+}
+
+/// Ask the given C/C++ `compiler` where it would find `file` by invoking it
+/// with `-print-file-name=<file>`, returning the trimmed path it prints.
+fn compiler_file(compiler: &Path, file: &str) -> PathBuf {
+    let out = output(Command::new(compiler)
+                            .arg(format!("-print-file-name={}", file)));
+    PathBuf::from(out.trim())
+}
+
+/// Prepare a new compiler from the artifacts in `stage`
+///
+/// This will assemble a compiler in `build/$host/stage$stage`. The compiler
+/// must have been previously produced by the `stage - 1` build.config.build
+/// compiler.
+pub fn assemble_rustc(build: &Build, stage: u32, host: &str) {
+    assert!(stage > 0, "the stage0 compiler isn't assembled, it's downloaded");
+    // The compiler that we're assembling
+    let target_compiler = Compiler::new(stage, host);
+
+    // The compiler that compiled the compiler we're assembling
+    let build_compiler = Compiler::new(stage - 1, &build.config.build);
+
+    // Clear out old files
+    let sysroot = build.sysroot(&target_compiler);
+    let _ = fs::remove_dir_all(&sysroot);
+    t!(fs::create_dir_all(&sysroot));
+
+    // Link in all dylibs to the libdir
+    let sysroot_libdir = sysroot.join(libdir(host));
+    t!(fs::create_dir_all(&sysroot_libdir));
+    let src_libdir = build.sysroot_libdir(&build_compiler, host);
+    for f in t!(fs::read_dir(&src_libdir)).map(|f| t!(f)) {
+        let filename = f.file_name().into_string().unwrap();
+        // Only dynamic libraries are copied over; everything else stays in
+        // the previous stage's output.
+        if is_dylib(&filename) {
+            copy(&f.path(), &sysroot_libdir.join(&filename));
+        }
+    }
+
+    let out_dir = build.cargo_out(&build_compiler, Mode::Librustc, host);
+
+    // Link the compiler binary itself into place
+    let rustc = out_dir.join(exe("rustc", host));
+    let bindir = sysroot.join("bin");
+    t!(fs::create_dir_all(&bindir));
+    let compiler = build.compiler_path(&Compiler::new(stage, host));
+    let _ = fs::remove_file(&compiler);
+    copy(&rustc, &compiler);
+
+    // See if rustdoc exists to link it into place
+    let rustdoc = exe("rustdoc", host);
+    let rustdoc_src = out_dir.join(&rustdoc);
+    let rustdoc_dst = bindir.join(&rustdoc);
+    // rustdoc may not have been built at this stage; its absence is fine.
+    if fs::metadata(&rustdoc_src).is_ok() {
+        let _ = fs::remove_file(&rustdoc_dst);
+        copy(&rustdoc_src, &rustdoc_dst);
+    }
+}
+
+/// Link some files into a rustc sysroot.
+///
+/// For a particular stage this will link all of the contents of `out_dir`
+/// into the sysroot of the `host` compiler, assuming the artifacts are
+/// compiled for the specified `target`.
+fn add_to_sysroot(out_dir: &Path, sysroot_dst: &Path) {
+    // Collect the set of all files in the dependencies directory, keyed
+    // off the name of the library. We assume everything is of the form
+    // `foo-<hash>.{rlib,so,...}`, and there could be multiple different
+    // `<hash>` values for the same name (of old builds).
+    let mut map = HashMap::new();
+    for file in t!(fs::read_dir(out_dir.join("deps"))).map(|f| t!(f)) {
+        let filename = file.file_name().into_string().unwrap();
+
+        // We're only interested in linking rlibs + dylibs, other things like
+        // unit tests don't get linked in
+        if !filename.ends_with(".rlib") &&
+           !filename.ends_with(".lib") &&
+           !is_dylib(&filename) {
+            continue
+        }
+        let file = file.path();
+        // Key on (crate name, extension) so that e.g. an rlib and a dylib of
+        // the same crate are tracked independently.
+        // NOTE(review): both unwraps below panic if a matching filename has
+        // no `-` or no extension; the filters above are assumed to rule that
+        // out — confirm for dylib naming on all platforms.
+        let dash = filename.find("-").unwrap();
+        let key = (filename[..dash].to_string(),
+                   file.extension().unwrap().to_owned());
+        map.entry(key).or_insert(Vec::new())
+           .push(file.clone());
+    }
+
+    // For all hash values found, pick the most recent one to move into the
+    // sysroot, that should be the one we just built.
+    for (_, paths) in map {
+        let (_, path) = paths.iter().map(|path| {
+            (mtime(&path).seconds(), path)
+        }).max().unwrap();
+        copy(&path, &sysroot_dst.join(path.file_name().unwrap()));
+    }
+}
+
+/// Build a tool in `src/tools`
+///
+/// This will build the specified tool with the specified `host` compiler in
+/// `stage` into the normal cargo output directory.
+pub fn tool(build: &Build, stage: u32, host: &str, tool: &str) {
+    println!("Building stage{} tool {} ({})", stage, tool, host);
+
+    let compiler = Compiler::new(stage, host);
+
+    // FIXME: need to clear out previous tool and ideally deps, may require
+    //        isolating output directories or require a pseudo shim step to
+    //        clear out all the info.
+    //
+    //        Maybe when libstd is compiled it should clear out the rustc of the
+    //        corresponding stage?
+    // let out_dir = build.cargo_out(stage, &host, Mode::Librustc, target);
+    // build.clear_if_dirty(&out_dir, &libstd_shim(build, stage, &host, target));
+
+    // Each tool lives under src/tools/<tool> with its own manifest.
+    let mut cargo = build.cargo(&compiler, Mode::Tool, host, "build");
+    cargo.arg("--manifest-path")
+         .arg(build.src.join(format!("src/tools/{}/Cargo.toml", tool)));
+    build.run(&mut cargo);
+}
diff --git a/src/bootstrap/config.rs b/src/bootstrap/config.rs
new file mode 100644 (file)
index 0000000..498196e
--- /dev/null
@@ -0,0 +1,396 @@
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Serialized configuration of a build.
+//!
+//! This module implements parsing `config.mk` and `config.toml` configuration
+//! files to tweak how the build runs.
+
+use std::collections::HashMap;
+use std::env;
+use std::fs::File;
+use std::io::prelude::*;
+use std::path::PathBuf;
+use std::process;
+
+use num_cpus;
+use rustc_serialize::Decodable;
+use toml::{Parser, Decoder, Value};
+
+/// Global configuration for the entire build and/or bootstrap.
+///
+/// This structure is derived from a combination of both `config.toml` and
+/// `config.mk`. As of the time of this writing it's unlikely that `config.toml`
+/// is used all that much, so this is primarily filled out by `config.mk` which
+/// is generated from `./configure`.
+///
+/// Note that this structure is not decoded directly into, but rather it is
+/// filled out from the decoded forms of the structs below. For documentation
+/// each field, see the corresponding fields in
+/// `src/bootstrap/config.toml.example`.
+#[derive(Default)]
+pub struct Config {
+    // general build options
+    pub ccache: bool,
+    pub ninja: bool,
+    pub verbose: bool,
+    pub submodules: bool,
+    pub compiler_docs: bool,
+    pub docs: bool,
+    // per-target overrides, keyed by target triple
+    pub target_config: HashMap<String, Target>,
+
+    // llvm codegen options
+    pub llvm_assertions: bool,
+    pub llvm_optimize: bool,
+    pub llvm_version_check: bool,
+    pub llvm_static_stdcpp: bool,
+
+    // rust codegen options
+    pub rust_optimize: bool,
+    pub rust_codegen_units: u32,
+    pub rust_debug_assertions: bool,
+    pub rust_debuginfo: bool,
+    pub rust_rpath: bool,
+    pub rustc_default_linker: Option<String>,
+    pub rustc_default_ar: Option<String>,
+    pub rust_optimize_tests: bool,
+    pub rust_debuginfo_tests: bool,
+
+    // build/host/target triples plus local toolchain overrides
+    pub build: String,
+    pub host: Vec<String>,
+    pub target: Vec<String>,
+    pub rustc: Option<PathBuf>,
+    pub cargo: Option<PathBuf>,
+    pub local_rebuild: bool,
+
+    // libstd features
+    pub debug_jemalloc: bool,
+    pub use_jemalloc: bool,
+
+    // misc
+    pub channel: String,
+    pub musl_root: Option<PathBuf>,
+    pub prefix: Option<String>,
+}
+
+/// Per-target configuration stored in the global configuration structure.
+#[derive(Default)]
+pub struct Target {
+    pub llvm_config: Option<PathBuf>, // path to this target's `llvm-config`
+    pub jemalloc: Option<PathBuf>,    // jemalloc override (JEMALLOC_OVERRIDE)
+    pub cc: Option<PathBuf>,          // C compiler override
+    pub cxx: Option<PathBuf>,         // C++ compiler override
+    pub ndk: Option<PathBuf>,         // Android NDK root
+}
+
+/// Structure of the `config.toml` file that configuration is read from.
+///
+/// This structure uses `Decodable` to automatically decode a TOML configuration
+/// file into this format, and then this is traversed and written into the above
+/// `Config` structure.
+#[derive(RustcDecodable, Default)]
+struct TomlConfig {
+    build: Option<Build>,                        // [build]
+    llvm: Option<Llvm>,                          // [llvm]
+    rust: Option<Rust>,                          // [rust]
+    target: Option<HashMap<String, TomlTarget>>, // [target.<triple>]
+}
+
+/// TOML representation of various global build decisions.
+#[derive(RustcDecodable, Default, Clone)]
+struct Build {
+    build: Option<String>,
+    // Unlike the other fields, `host`/`target` are non-optional vectors.
+    host: Vec<String>,
+    target: Vec<String>,
+    cargo: Option<String>,
+    rustc: Option<String>,
+    compiler_docs: Option<bool>,
+    docs: Option<bool>,
+}
+
+/// TOML representation of how the LLVM build is configured.
+// All fields optional; `None` leaves the `Config` default untouched (see
+// `set` at the bottom of this file).
+#[derive(RustcDecodable, Default)]
+struct Llvm {
+    ccache: Option<bool>,
+    ninja: Option<bool>,
+    assertions: Option<bool>,
+    optimize: Option<bool>,
+    version_check: Option<bool>,
+    static_libstdcpp: Option<bool>,
+}
+
+/// TOML representation of how the Rust build is configured.
+// All fields optional; `None` leaves the `Config` default untouched.
+#[derive(RustcDecodable, Default)]
+struct Rust {
+    optimize: Option<bool>,
+    // `Some(0)` means "one codegen unit per CPU" (see `Config::parse`).
+    codegen_units: Option<u32>,
+    debug_assertions: Option<bool>,
+    debuginfo: Option<bool>,
+    debug_jemalloc: Option<bool>,
+    use_jemalloc: Option<bool>,
+    default_linker: Option<String>,
+    default_ar: Option<String>,
+    channel: Option<String>,
+    musl_root: Option<String>,
+    rpath: Option<bool>,
+    optimize_tests: Option<bool>,
+    debuginfo_tests: Option<bool>,
+}
+
+/// TOML representation of how each build target is configured.
+#[derive(RustcDecodable, Default)]
+struct TomlTarget {
+    llvm_config: Option<String>,
+    jemalloc: Option<String>,
+    cc: Option<String>,
+    cxx: Option<String>,
+    android_ndk: Option<String>, // stored as `Target::ndk`
+}
+
+impl Config {
+    /// Construct the configuration for `build` (the build triple), layered
+    /// with the values decoded from the TOML `file` when one is given.
+    ///
+    /// Any TOML parse or decode failure prints the errors and exits the
+    /// process with code 2.
+    pub fn parse(build: &str, file: Option<PathBuf>) -> Config {
+        // Hard-coded defaults; values read from the file below override them.
+        let mut config = Config::default();
+        config.llvm_optimize = true;
+        config.use_jemalloc = true;
+        config.rust_optimize = true;
+        config.rust_optimize_tests = true;
+        config.submodules = true;
+        config.docs = true;
+        config.rust_rpath = true;
+        config.rust_codegen_units = 1;
+        config.build = build.to_string();
+        config.channel = "dev".to_string();
+
+        // Parse and decode `config.toml` when a path was given.
+        let toml = file.map(|file| {
+            let mut f = t!(File::open(&file));
+            let mut toml = String::new();
+            t!(f.read_to_string(&mut toml));
+            let mut p = Parser::new(&toml);
+            let table = match p.parse() {
+                Some(table) => table,
+                None => {
+                    // Report every parse error with line:col spans, then bail.
+                    println!("failed to parse TOML configuration:");
+                    for err in p.errors.iter() {
+                        let (loline, locol) = p.to_linecol(err.lo);
+                        let (hiline, hicol) = p.to_linecol(err.hi);
+                        println!("{}:{}-{}:{}: {}", loline, locol, hiline,
+                                 hicol, err.desc);
+                    }
+                    process::exit(2);
+                }
+            };
+            let mut d = Decoder::new(Value::Table(table));
+            match Decodable::decode(&mut d) {
+                Ok(cfg) => cfg,
+                Err(e) => {
+                    println!("failed to decode TOML: {}", e);
+                    process::exit(2);
+                }
+            }
+        }).unwrap_or_else(|| TomlConfig::default());
+
+        // Merge the `[build]` section: the build triple is always a host,
+        // and every host is in turn also a target (without duplicates).
+        let build = toml.build.clone().unwrap_or(Build::default());
+        set(&mut config.build, build.build.clone());
+        config.host.push(config.build.clone());
+        for host in build.host.iter() {
+            if !config.host.contains(host) {
+                config.host.push(host.clone());
+            }
+        }
+        for target in config.host.iter().chain(&build.target) {
+            if !config.target.contains(target) {
+                config.target.push(target.clone());
+            }
+        }
+        config.rustc = build.rustc.map(PathBuf::from);
+        config.cargo = build.cargo.map(PathBuf::from);
+        set(&mut config.compiler_docs, build.compiler_docs);
+        set(&mut config.docs, build.docs);
+
+        // Merge the `[llvm]` section.
+        if let Some(ref llvm) = toml.llvm {
+            set(&mut config.ccache, llvm.ccache);
+            set(&mut config.ninja, llvm.ninja);
+            set(&mut config.llvm_assertions, llvm.assertions);
+            set(&mut config.llvm_optimize, llvm.optimize);
+            set(&mut config.llvm_version_check, llvm.version_check);
+            set(&mut config.llvm_static_stdcpp, llvm.static_libstdcpp);
+        }
+        // Merge the `[rust]` section.
+        if let Some(ref rust) = toml.rust {
+            set(&mut config.rust_debug_assertions, rust.debug_assertions);
+            set(&mut config.rust_debuginfo, rust.debuginfo);
+            set(&mut config.rust_optimize, rust.optimize);
+            set(&mut config.rust_optimize_tests, rust.optimize_tests);
+            set(&mut config.rust_debuginfo_tests, rust.debuginfo_tests);
+            set(&mut config.rust_rpath, rust.rpath);
+            set(&mut config.debug_jemalloc, rust.debug_jemalloc);
+            set(&mut config.use_jemalloc, rust.use_jemalloc);
+            set(&mut config.channel, rust.channel.clone());
+            config.rustc_default_linker = rust.default_linker.clone();
+            config.rustc_default_ar = rust.default_ar.clone();
+            config.musl_root = rust.musl_root.clone().map(PathBuf::from);
+
+            // `codegen-units = 0` means "one codegen unit per CPU".
+            match rust.codegen_units {
+                Some(0) => config.rust_codegen_units = num_cpus::get() as u32,
+                Some(n) => config.rust_codegen_units = n,
+                None => {}
+            }
+        }
+
+        // Merge the per-target `[target.<triple>]` sections. Paths for
+        // llvm-config, jemalloc, and the NDK are resolved against the
+        // current directory; cc/cxx are used verbatim.
+        if let Some(ref t) = toml.target {
+            for (triple, cfg) in t {
+                let mut target = Target::default();
+
+                if let Some(ref s) = cfg.llvm_config {
+                    target.llvm_config = Some(env::current_dir().unwrap().join(s));
+                }
+                if let Some(ref s) = cfg.jemalloc {
+                    target.jemalloc = Some(env::current_dir().unwrap().join(s));
+                }
+                if let Some(ref s) = cfg.android_ndk {
+                    target.ndk = Some(env::current_dir().unwrap().join(s));
+                }
+                target.cxx = cfg.cxx.clone().map(PathBuf::from);
+                target.cc = cfg.cc.clone().map(PathBuf::from);
+
+                config.target_config.insert(triple.clone(), target);
+            }
+        }
+
+        return config
+    }
+
+    /// "Temporary" routine to parse `config.mk` into this configuration.
+    ///
+    /// While we still have `./configure` this implements the ability to decode
+    /// that configuration into this. This isn't exactly a full-blown makefile
+    /// parser, but hey it gets the job done!
+    pub fn update_with_config_mk(&mut self) {
+        let mut config = String::new();
+        File::open("config.mk").unwrap().read_to_string(&mut config).unwrap();
+        for line in config.lines() {
+            // Lines look like `KEY := value`; strip surrounding quotes from
+            // quoted values and skip lines without `:=`.
+            let mut parts = line.splitn(2, ":=").map(|s| s.trim());
+            let key = parts.next().unwrap();
+            let value = match parts.next() {
+                Some(n) if n.starts_with('\"') => &n[1..n.len() - 1],
+                Some(n) => n,
+                None => continue
+            };
+
+            // Maps `CFG_ENABLE_<NAME>`/`CFG_DISABLE_<NAME>` keys (whose value
+            // is "1") onto the corresponding boolean fields.
+            macro_rules! check {
+                ($(($name:expr, $val:expr),)*) => {
+                    if value == "1" {
+                        $(
+                            if key == concat!("CFG_ENABLE_", $name) {
+                                $val = true;
+                                continue
+                            }
+                            if key == concat!("CFG_DISABLE_", $name) {
+                                $val = false;
+                                continue
+                            }
+                        )*
+                    }
+                }
+            }
+
+            check! {
+                ("CCACHE", self.ccache),
+                ("MANAGE_SUBMODULES", self.submodules),
+                ("COMPILER_DOCS", self.compiler_docs),
+                ("DOCS", self.docs),
+                ("LLVM_ASSERTIONS", self.llvm_assertions),
+                ("OPTIMIZE_LLVM", self.llvm_optimize),
+                ("LLVM_VERSION_CHECK", self.llvm_version_check),
+                ("LLVM_STATIC_STDCPP", self.llvm_static_stdcpp),
+                ("OPTIMIZE", self.rust_optimize),
+                ("DEBUG_ASSERTIONS", self.rust_debug_assertions),
+                ("DEBUGINFO", self.rust_debuginfo),
+                ("JEMALLOC", self.use_jemalloc),
+                ("DEBUG_JEMALLOC", self.debug_jemalloc),
+                ("RPATH", self.rust_rpath),
+                ("OPTIMIZE_TESTS", self.rust_optimize_tests),
+                ("DEBUGINFO_TESTS", self.rust_debuginfo_tests),
+                ("LOCAL_REBUILD", self.local_rebuild),
+            }
+
+            // Remaining keys carry string/path values.
+            match key {
+                "CFG_BUILD" => self.build = value.to_string(),
+                "CFG_HOST" => {
+                    self.host = value.split(" ").map(|s| s.to_string())
+                                     .collect();
+                }
+                "CFG_TARGET" => {
+                    self.target = value.split(" ").map(|s| s.to_string())
+                                       .collect();
+                }
+                "CFG_MUSL_ROOT" if value.len() > 0 => {
+                    self.musl_root = Some(PathBuf::from(value));
+                }
+                "CFG_DEFAULT_AR" if value.len() > 0 => {
+                    self.rustc_default_ar = Some(value.to_string());
+                }
+                "CFG_DEFAULT_LINKER" if value.len() > 0 => {
+                    self.rustc_default_linker = Some(value.to_string());
+                }
+                "CFG_RELEASE_CHANNEL" => {
+                    self.channel = value.to_string();
+                }
+                "CFG_PREFIX" => {
+                    self.prefix = Some(value.to_string());
+                }
+                "CFG_LLVM_ROOT" if value.len() > 0 => {
+                    let target = self.target_config.entry(self.build.clone())
+                                     .or_insert(Target::default());
+                    let root = PathBuf::from(value);
+                    target.llvm_config = Some(root.join("bin/llvm-config"));
+                }
+                "CFG_JEMALLOC_ROOT" if value.len() > 0 => {
+                    let target = self.target_config.entry(self.build.clone())
+                                     .or_insert(Target::default());
+                    target.jemalloc = Some(PathBuf::from(value));
+                }
+                // Android NDK roots, one makefile key per supported triple.
+                "CFG_ARM_LINUX_ANDROIDEABI_NDK" if value.len() > 0 => {
+                    let target = "arm-linux-androideabi".to_string();
+                    let target = self.target_config.entry(target)
+                                     .or_insert(Target::default());
+                    target.ndk = Some(PathBuf::from(value));
+                }
+                "CFG_ARMV7_LINUX_ANDROIDEABI_NDK" if value.len() > 0 => {
+                    let target = "armv7-linux-androideabi".to_string();
+                    let target = self.target_config.entry(target)
+                                     .or_insert(Target::default());
+                    target.ndk = Some(PathBuf::from(value));
+                }
+                "CFG_I686_LINUX_ANDROID_NDK" if value.len() > 0 => {
+                    let target = "i686-linux-android".to_string();
+                    let target = self.target_config.entry(target)
+                                     .or_insert(Target::default());
+                    target.ndk = Some(PathBuf::from(value));
+                }
+                "CFG_AARCH64_LINUX_ANDROID_NDK" if value.len() > 0 => {
+                    let target = "aarch64-linux-android".to_string();
+                    let target = self.target_config.entry(target)
+                                     .or_insert(Target::default());
+                    target.ndk = Some(PathBuf::from(value));
+                }
+                // Points `rustc`/`cargo` at a pre-installed local toolchain.
+                "CFG_LOCAL_RUST_ROOT" if value.len() > 0 => {
+                    self.rustc = Some(PathBuf::from(value).join("bin/rustc"));
+                    self.cargo = Some(PathBuf::from(value).join("bin/cargo"));
+                }
+                _ => {}
+            }
+        }
+    }
+}
+
+/// Overwrite `field` with `val` when it is `Some`, leaving the existing
+/// value untouched on `None`.
+fn set<T>(field: &mut T, val: Option<T>) {
+    if let Some(v) = val {
+        *field = v;
+    }
+}
diff --git a/src/bootstrap/dist.rs b/src/bootstrap/dist.rs
new file mode 100644 (file)
index 0000000..1cf71c3
--- /dev/null
@@ -0,0 +1,319 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Implementation of the various distribution aspects of the compiler.
+//!
+//! This module is responsible for creating tarballs of the standard library,
+//! compiler, and documentation. This ends up being what we distribute to
+//! everyone as well.
+//!
+//! No tarball is actually created literally in this file, but rather we shell
+//! out to `rust-installer` still. This may one day be replaced with bits and
+//! pieces of `rustup.rs`!
+
+use std::fs::{self, File};
+use std::io::Write;
+use std::path::{PathBuf, Path};
+use std::process::Command;
+
+use {Build, Compiler};
+use util::{cp_r, libdir, is_dylib};
+
+/// Returns the version component embedded in package file names for the
+/// configured release channel: the full release number on `stable` (and on
+/// any unrecognized channel, via the catch-all arm), or the literal channel
+/// name on `beta`/`nightly`.
+fn package_vers(build: &Build) -> &str {
+    match &build.config.channel[..] {
+        "stable" => &build.release,
+        "beta" => "beta",
+        "nightly" => "nightly",
+        _ => &build.release,
+    }
+}
+
+/// Directory under the build output root where finished installer
+/// tarballs are placed.
+fn distdir(build: &Build) -> PathBuf {
+    build.out.join("dist")
+}
+
+/// Scratch directory used to stage installer "images" and overlays while
+/// tarballs are being assembled.
+fn tmpdir(build: &Build) -> PathBuf {
+    build.out.join("tmp/dist")
+}
+
+/// Builds the `rust-docs` installer component.
+///
+/// Slurps up documentation from the `stage`'s `host`.
+pub fn docs(build: &Build, stage: u32, host: &str) {
+    println!("Dist docs stage{} ({})", stage, host);
+    let name = format!("rust-docs-{}", package_vers(build));
+    // NOTE(review): every sibling component interpolates `(name, host)` into
+    // the image directory name; `(name, name)` here looks like a copy/paste
+    // slip. Harmless as long as the directory stays unique -- confirm intent.
+    let image = tmpdir(build).join(format!("{}-{}-image", name, name));
+    // Best-effort cleanup of a previous run; ignore "not found" errors.
+    let _ = fs::remove_dir_all(&image);
+
+    // Lay out the generated HTML docs inside the image tree.
+    let dst = image.join("share/doc/rust/html");
+    t!(fs::create_dir_all(&dst));
+    let src = build.out.join(host).join("doc");
+    cp_r(&src, &dst);
+
+    // Shell out to rust-installer to wrap the image into a tarball.
+    let mut cmd = Command::new("sh");
+    cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
+       .arg("--product-name=Rust-Documentation")
+       .arg("--rel-manifest-dir=rustlib")
+       .arg("--success-message=Rust-documentation-is-installed.")
+       .arg(format!("--image-dir={}", sanitize_sh(&image)))
+       .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
+       .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
+       .arg(format!("--package-name={}-{}", name, host))
+       .arg("--component-name=rust-docs")
+       .arg("--legacy-manifest-dirs=rustlib,cargo")
+       .arg("--bulk-dirs=share/doc/rust/html");
+    build.run(&mut cmd);
+    t!(fs::remove_dir_all(&image));
+
+    // As part of this step, *also* copy the docs directory to a directory which
+    // buildbot typically uploads.
+    if host == build.config.build {
+        let dst = distdir(build).join("doc").join(&build.package_vers);
+        t!(fs::create_dir_all(&dst));
+        cp_r(&src, &dst);
+    }
+}
+
+/// Build the `rust-mingw` installer component.
+///
+/// This contains all the bits and pieces to run the MinGW Windows targets
+/// without any extra installed software (e.g. we bundle gcc, libraries, etc).
+/// Currently just shells out to a python script, but that should be rewritten
+/// in Rust.
+pub fn mingw(build: &Build, host: &str) {
+    println!("Dist mingw ({})", host);
+    let name = format!("rust-mingw-{}", package_vers(build));
+    let image = tmpdir(build).join(format!("{}-{}-image", name, host));
+    // Best-effort cleanup of a previous run; ignore "not found" errors.
+    let _ = fs::remove_dir_all(&image);
+
+    // The first argument to the script is a "temporary directory" which is just
+    // thrown away (this contains the runtime DLLs included in the rustc package
+    // above) and the second argument is where to place all the MinGW components
+    // (which is what we want).
+    //
+    // FIXME: this script should be rewritten into Rust
+    let mut cmd = Command::new("python");
+    cmd.arg(build.src.join("src/etc/make-win-dist.py"))
+       .arg(tmpdir(build))
+       .arg(&image)
+       .arg(host);
+    build.run(&mut cmd);
+
+    // Wrap the populated image into an installer tarball via rust-installer.
+    let mut cmd = Command::new("sh");
+    cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
+       .arg("--product-name=Rust-MinGW")
+       .arg("--rel-manifest-dir=rustlib")
+       .arg("--success-message=Rust-MinGW-is-installed.")
+       .arg(format!("--image-dir={}", sanitize_sh(&image)))
+       .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
+       .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
+       .arg(format!("--package-name={}-{}", name, host))
+       .arg("--component-name=rust-mingw")
+       .arg("--legacy-manifest-dirs=rustlib,cargo");
+    build.run(&mut cmd);
+    t!(fs::remove_dir_all(&image));
+}
+
+/// Creates the `rustc` installer component.
+///
+/// Stages the stage-`stage` compiler for `host` into an installable image
+/// (binaries, runtime DLLs, man pages, debugger scripts, license files),
+/// adds a non-installed "overlay" of top-level metadata, and shells out to
+/// rust-installer to produce the final tarball.
+pub fn rustc(build: &Build, stage: u32, host: &str) {
+    println!("Dist rustc stage{} ({})", stage, host);
+    let name = format!("rustc-{}", package_vers(build));
+    let image = tmpdir(build).join(format!("{}-{}-image", name, host));
+    // Best-effort cleanup of previous runs; ignore "not found" errors.
+    let _ = fs::remove_dir_all(&image);
+    let overlay = tmpdir(build).join(format!("{}-{}-overlay", name, host));
+    let _ = fs::remove_dir_all(&overlay);
+
+    // Prepare the rustc "image", what will actually end up getting installed
+    prepare_image(build, stage, host, &image);
+
+    // Prepare the overlay which is part of the tarball but won't actually be
+    // installed
+    let cp = |file: &str| {
+        install(&build.src.join(file), &overlay, 0o644);
+    };
+    cp("COPYRIGHT");
+    cp("LICENSE-APACHE");
+    cp("LICENSE-MIT");
+    cp("README.md");
+    // tiny morsel of metadata is used by rust-packaging
+    let version = &build.version;
+    t!(t!(File::create(overlay.join("version"))).write_all(version.as_bytes()));
+
+    // On MinGW we've got a few runtime DLL dependencies that we need to
+    // include. The first argument to this script is where to put these DLLs
+    // (the image we're creating), and the second argument is a junk directory
+    // to ignore all other MinGW stuff the script creates.
+    //
+    // On 32-bit MinGW we're always including a DLL which needs some extra
+    // licenses to distribute. On 64-bit MinGW we don't actually distribute
+    // anything requiring us to distribute a license, but it's likely the
+    // install will *also* include the rust-mingw package, which also needs
+    // licenses, so to be safe we just include it here in all MinGW packages.
+    //
+    // FIXME: this script should be rewritten into Rust
+    if host.contains("pc-windows-gnu") {
+        let mut cmd = Command::new("python");
+        cmd.arg(build.src.join("src/etc/make-win-dist.py"))
+           .arg(&image)
+           .arg(tmpdir(build))
+           .arg(host);
+        build.run(&mut cmd);
+
+        let dst = image.join("share/doc");
+        t!(fs::create_dir_all(&dst));
+        cp_r(&build.src.join("src/etc/third-party"), &dst);
+    }
+
+    // Finally, wrap everything up in a nice tarball!
+    let mut cmd = Command::new("sh");
+    cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
+       .arg("--product-name=Rust")
+       .arg("--rel-manifest-dir=rustlib")
+       .arg("--success-message=Rust-is-ready-to-roll.")
+       .arg(format!("--image-dir={}", sanitize_sh(&image)))
+       .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
+       .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
+       .arg(format!("--non-installed-overlay={}", sanitize_sh(&overlay)))
+       .arg(format!("--package-name={}-{}", name, host))
+       .arg("--component-name=rustc")
+       .arg("--legacy-manifest-dirs=rustlib,cargo");
+    build.run(&mut cmd);
+    t!(fs::remove_dir_all(&image));
+    t!(fs::remove_dir_all(&overlay));
+
+    // Populates `image` with everything the rustc component installs: the
+    // compiler binaries from the stage sysroot, any runtime dylibs they need,
+    // man pages, debugger scripts, and license files.
+    fn prepare_image(build: &Build, stage: u32, host: &str, image: &Path) {
+        let src = build.sysroot(&Compiler::new(stage, host));
+        let libdir = libdir(host);
+
+        // Copy rustc/rustdoc binaries
+        t!(fs::create_dir_all(image.join("bin")));
+        cp_r(&src.join("bin"), &image.join("bin"));
+
+        // Copy runtime DLLs needed by the compiler
+        // (on platforms where they don't already live next to the binaries)
+        if libdir != "bin" {
+            for entry in t!(src.join(libdir).read_dir()).map(|e| t!(e)) {
+                let name = entry.file_name();
+                if let Some(s) = name.to_str() {
+                    if is_dylib(s) {
+                        install(&entry.path(), &image.join(libdir), 0o644);
+                    }
+                }
+            }
+        }
+
+        // Man pages
+        t!(fs::create_dir_all(image.join("share/man/man1")));
+        cp_r(&build.src.join("man"), &image.join("share/man/man1"));
+
+        // Debugger scripts
+        debugger_scripts(build, &image, host);
+
+        // Misc license info
+        let cp = |file: &str| {
+            install(&build.src.join(file), &image.join("share/doc/rust"), 0o644);
+        };
+        cp("COPYRIGHT");
+        cp("LICENSE-APACHE");
+        cp("LICENSE-MIT");
+        cp("README.md");
+    }
+}
+
+/// Copies debugger scripts for `host` into the `sysroot` specified.
+///
+/// MSVC hosts get nothing; all other hosts get the gdb and lldb pretty-printer
+/// scripts (into `lib/rustlib/etc`) plus the `rust-gdb`/`rust-lldb` launcher
+/// shims (into `bin`).
+pub fn debugger_scripts(build: &Build,
+                        sysroot: &Path,
+                        host: &str) {
+    // Installs a single pretty-printer script with read-only-ish perms.
+    let cp_debugger_script = |file: &str| {
+        let dst = sysroot.join("lib/rustlib/etc");
+        t!(fs::create_dir_all(&dst));
+        install(&build.src.join("src/etc/").join(file), &dst, 0o644);
+    };
+    if host.contains("windows-msvc") {
+        // no debugger scripts
+    } else {
+        cp_debugger_script("debugger_pretty_printers_common.py");
+
+        // gdb debugger scripts
+        // (0o755: the launcher shim must be executable)
+        install(&build.src.join("src/etc/rust-gdb"), &sysroot.join("bin"),
+                0o755);
+
+        cp_debugger_script("gdb_load_rust_pretty_printers.py");
+        cp_debugger_script("gdb_rust_pretty_printing.py");
+
+        // lldb debugger scripts
+        install(&build.src.join("src/etc/rust-lldb"), &sysroot.join("bin"),
+                0o755);
+
+        cp_debugger_script("lldb_rust_formatters.py");
+    }
+}
+
+/// Creates the `rust-std` installer component as compiled by `compiler` for the
+/// target `target`.
+pub fn std(build: &Build, compiler: &Compiler, target: &str) {
+    println!("Dist std stage{} ({} -> {})", compiler.stage, compiler.host,
+             target);
+    let name = format!("rust-std-{}", package_vers(build));
+    let image = tmpdir(build).join(format!("{}-{}-image", name, target));
+    // Best-effort cleanup of a previous run; ignore "not found" errors.
+    let _ = fs::remove_dir_all(&image);
+
+    // Copy the target's rustlib tree out of the compiler's sysroot into the
+    // image, mirroring the installed layout.
+    let dst = image.join("lib/rustlib").join(target);
+    t!(fs::create_dir_all(&dst));
+    let src = build.sysroot(compiler).join("lib/rustlib");
+    cp_r(&src.join(target), &dst);
+
+    // Shell out to rust-installer to produce the final tarball. Note that the
+    // component name is suffixed with the target so multiple std components
+    // can coexist in one installation.
+    let mut cmd = Command::new("sh");
+    cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
+       .arg("--product-name=Rust")
+       .arg("--rel-manifest-dir=rustlib")
+       .arg("--success-message=std-is-standing-at-the-ready.")
+       .arg(format!("--image-dir={}", sanitize_sh(&image)))
+       .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
+       .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
+       .arg(format!("--package-name={}-{}", name, target))
+       .arg(format!("--component-name=rust-std-{}", target))
+       .arg("--legacy-manifest-dirs=rustlib,cargo");
+    build.run(&mut cmd);
+    t!(fs::remove_dir_all(&image));
+}
+
+/// Copies `src` into `dstdir` (creating the directory as needed) and sets
+/// `perms` on the copy. Note the permission bits only take effect on Unix;
+/// `chmod` is a no-op on Windows.
+fn install(src: &Path, dstdir: &Path, perms: u32) {
+    let dst = dstdir.join(src.file_name().unwrap());
+    t!(fs::create_dir_all(dstdir));
+    t!(fs::copy(src, &dst));
+    chmod(&dst, perms);
+}
+
+// Sets Unix permission bits on `path`; panics (via `t!`) on failure.
+#[cfg(unix)]
+fn chmod(path: &Path, perms: u32) {
+    use std::os::unix::fs::*;
+    t!(fs::set_permissions(path, fs::Permissions::from_mode(perms)));
+}
+// Windows has no Unix-style permission bits, so this is a no-op there.
+#[cfg(windows)]
+fn chmod(_path: &Path, _perms: u32) {}
+
+// We have to run a few shell scripts, which choke quite a bit on both `\`
+// characters and on `C:\` paths, so normalize both of them away.
+fn sanitize_sh(path: &Path) -> String {
+    let path = path.to_str().unwrap().replace("\\", "/");
+    return change_drive(&path).unwrap_or(path);
+
+    // Rewrites an MSYS-style `C:/foo` prefix into `/C/foo`; returns `None`
+    // when the string doesn't start with a single-character drive followed
+    // by `:/`, leaving the original path untouched.
+    fn change_drive(s: &str) -> Option<String> {
+        let mut ch = s.chars();
+        // Empty string falls back to 'C', but then the `ch.next() != Some(':')`
+        // check below returns None anyway.
+        let drive = ch.next().unwrap_or('C');
+        if ch.next() != Some(':') {
+            return None
+        }
+        if ch.next() != Some('/') {
+            return None
+        }
+        // `len_utf8` keeps the slice boundary correct even for a non-ASCII
+        // first character.
+        Some(format!("/{}/{}", drive, &s[drive.len_utf8() + 2..]))
+    }
+}
diff --git a/src/bootstrap/doc.rs b/src/bootstrap/doc.rs
new file mode 100644 (file)
index 0000000..ac90ab5
--- /dev/null
@@ -0,0 +1,207 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Documentation generation for rustbuild.
+//!
+//! This module implements generation for all bits and pieces of documentation
+//! for the Rust project. This notably includes suites like the rust book, the
+//! nomicon, standalone documentation, etc.
+//!
+//! Everything here is basically just a shim around calling either `rustbook` or
+//! `rustdoc`.
+
+use std::fs::{self, File};
+use std::io::prelude::*;
+use std::path::Path;
+use std::process::Command;
+
+use {Build, Compiler, Mode};
+use util::{up_to_date, cp_r};
+
+/// Invoke `rustbook` as compiled in `stage` for `target` for the doc book
+/// `name` into the `out` path.
+///
+/// This will not actually generate any documentation if the documentation has
+/// already been generated.
+pub fn rustbook(build: &Build, stage: u32, target: &str, name: &str, out: &Path) {
+    t!(fs::create_dir_all(out));
+
+    let out = out.join(name);
+    // rustbook is always run from the build host's compiler tools.
+    let compiler = Compiler::new(stage, &build.config.build);
+    let src = build.src.join("src/doc").join(name);
+    let index = out.join("index.html");
+    let rustbook = build.tool(&compiler, "rustbook");
+    // Skip regeneration when both the book sources and the rustbook binary
+    // itself are older than the generated index.
+    if up_to_date(&src, &index) && up_to_date(&rustbook, &index) {
+        return
+    }
+    println!("Rustbook stage{} ({}) - {}", stage, target, name);
+    // Remove stale output first; ignore "not found" errors.
+    let _ = fs::remove_dir_all(&out);
+    build.run(build.tool_cmd(&compiler, "rustbook")
+                   .arg("build")
+                   .arg(&src)
+                   .arg(out));
+}
+
+/// Generates all standalone documentation as compiled by the rustdoc in `stage`
+/// for the `target` into `out`.
+///
+/// This will list all of `src/doc` looking for markdown files and appropriately
+/// perform transformations like substituting `VERSION`, `SHORT_HASH`, and
+/// `STAMP` along with providing the various header/footer HTML we've customized.
+///
+/// In the end, this is just a glorified wrapper around rustdoc!
+pub fn standalone(build: &Build, stage: u32, target: &str, out: &Path) {
+    println!("Documenting stage{} standalone ({})", stage, target);
+    t!(fs::create_dir_all(out));
+
+    let compiler = Compiler::new(stage, &build.config.build);
+
+    // Shared HTML fragments injected into every generated page.
+    let favicon = build.src.join("src/doc/favicon.inc");
+    let footer = build.src.join("src/doc/footer.inc");
+    let full_toc = build.src.join("src/doc/full-toc.inc");
+    t!(fs::copy(build.src.join("src/doc/rust.css"), out.join("rust.css")));
+
+    let version_input = build.src.join("src/doc/version_info.html.template");
+    let version_info = out.join("version_info.html");
+
+    // Regenerate version_info.html from its template only when the template
+    // is newer, substituting release/hash placeholders.
+    if !up_to_date(&version_input, &version_info) {
+        let mut info = String::new();
+        t!(t!(File::open(&version_input)).read_to_string(&mut info));
+        // Hashes may be absent (e.g. builds outside a git checkout); fall
+        // back to empty strings.
+        let blank = String::new();
+        let short = build.short_ver_hash.as_ref().unwrap_or(&blank);
+        let hash = build.ver_hash.as_ref().unwrap_or(&blank);
+        let info = info.replace("VERSION", &build.release)
+                       .replace("SHORT_HASH", short)
+                       .replace("STAMP", hash);
+        t!(t!(File::create(&version_info)).write_all(info.as_bytes()));
+    }
+
+    for file in t!(fs::read_dir(build.src.join("src/doc"))) {
+        let file = t!(file);
+        let path = file.path();
+        let filename = path.file_name().unwrap().to_str().unwrap();
+        if !filename.ends_with(".md") || filename == "README.md" {
+            continue
+        }
+
+        let html = out.join(filename).with_extension("html");
+        let rustdoc = build.rustdoc(&compiler);
+        // Skip this page if it's newer than everything that feeds into it,
+        // including the rustdoc binary itself.
+        if up_to_date(&path, &html) &&
+           up_to_date(&footer, &html) &&
+           up_to_date(&favicon, &html) &&
+           up_to_date(&full_toc, &html) &&
+           up_to_date(&version_info, &html) &&
+           up_to_date(&rustdoc, &html) {
+            continue
+        }
+
+        let mut cmd = Command::new(&rustdoc);
+        build.add_rustc_lib_path(&compiler, &mut cmd);
+        cmd.arg("--html-after-content").arg(&footer)
+           .arg("--html-before-content").arg(&version_info)
+           .arg("--html-in-header").arg(&favicon)
+           .arg("--markdown-playground-url")
+           .arg("https://play.rust-lang.org/")
+           .arg("-o").arg(out)
+           .arg(&path);
+
+        // The reference additionally gets a full table of contents.
+        if filename == "reference.md" {
+           cmd.arg("--html-in-header").arg(&full_toc);
+        }
+
+        // not_found.html is served from arbitrary URLs, so it needs an
+        // absolute stylesheet URL; everything else uses the relative one.
+        if filename == "not_found.md" {
+            cmd.arg("--markdown-no-toc")
+               .arg("--markdown-css")
+               .arg("https://doc.rust-lang.org/rust.css");
+        } else {
+            cmd.arg("--markdown-css").arg("rust.css");
+        }
+        build.run(&mut cmd);
+    }
+}
+
+/// Compile all standard library documentation.
+///
+/// This will generate all documentation for the standard library and its
+/// dependencies. This is largely just a wrapper around `cargo doc`.
+pub fn std(build: &Build, stage: u32, target: &str, out: &Path) {
+    println!("Documenting stage{} std ({})", stage, target);
+    t!(fs::create_dir_all(out));
+    let compiler = Compiler::new(stage, &build.config.build);
+    let out_dir = build.stage_out(&compiler, Mode::Libstd)
+                       .join(target).join("doc");
+    let rustdoc = build.rustdoc(&compiler);
+
+    // Blow away stale output if the rustdoc binary changed since last time.
+    build.clear_if_dirty(&out_dir, &rustdoc);
+
+    let mut cargo = build.cargo(&compiler, Mode::Libstd, target, "doc");
+    cargo.arg("--manifest-path")
+         .arg(build.src.join("src/rustc/std_shim/Cargo.toml"))
+         .arg("--features").arg(build.std_features());
+    build.run(&mut cargo);
+    // Copy cargo's target-dir output into the final docs location.
+    cp_r(&out_dir, out)
+}
+
+/// Compile all libtest documentation.
+///
+/// This will generate all documentation for libtest and its dependencies. This
+/// is largely just a wrapper around `cargo doc`.
+pub fn test(build: &Build, stage: u32, target: &str, out: &Path) {
+    println!("Documenting stage{} test ({})", stage, target);
+    // NOTE(review): unlike `std` above, this never calls
+    // `fs::create_dir_all(out)`; presumably it relies on `std` having run
+    // first (or on `cp_r` creating the destination) -- confirm.
+    let compiler = Compiler::new(stage, &build.config.build);
+    let out_dir = build.stage_out(&compiler, Mode::Libtest)
+                       .join(target).join("doc");
+    let rustdoc = build.rustdoc(&compiler);
+
+    // Blow away stale output if the rustdoc binary changed since last time.
+    build.clear_if_dirty(&out_dir, &rustdoc);
+
+    let mut cargo = build.cargo(&compiler, Mode::Libtest, target, "doc");
+    cargo.arg("--manifest-path")
+         .arg(build.src.join("src/rustc/test_shim/Cargo.toml"));
+    build.run(&mut cargo);
+    // Copy cargo's target-dir output into the final docs location.
+    cp_r(&out_dir, out)
+}
+
+/// Generate all compiler documentation.
+///
+/// This will generate all documentation for the compiler libraries and their
+/// dependencies. This is largely just a wrapper around `cargo doc`.
+pub fn rustc(build: &Build, stage: u32, target: &str, out: &Path) {
+    println!("Documenting stage{} compiler ({})", stage, target);
+    let compiler = Compiler::new(stage, &build.config.build);
+    let out_dir = build.stage_out(&compiler, Mode::Librustc)
+                       .join(target).join("doc");
+    let rustdoc = build.rustdoc(&compiler);
+    // Rebuild from scratch whenever the rustdoc binary is newer than the
+    // generated index.
+    // NOTE(review): on a clean build `out_dir` won't exist yet, and the
+    // `t!` here would turn remove_dir_all's NotFound error into a panic --
+    // confirm whether an existence check is needed.
+    if !up_to_date(&rustdoc, &out_dir.join("rustc/index.html")) {
+        t!(fs::remove_dir_all(&out_dir));
+    }
+    let mut cargo = build.cargo(&compiler, Mode::Librustc, target, "doc");
+    cargo.arg("--manifest-path")
+         .arg(build.src.join("src/rustc/Cargo.toml"))
+         .arg("--features").arg(build.rustc_features());
+    build.run(&mut cargo);
+    // Copy cargo's target-dir output into the final docs location.
+    cp_r(&out_dir, out)
+}
+
+/// Generates the HTML rendered error-index by running the
+/// `error_index_generator` tool.
+pub fn error_index(build: &Build, stage: u32, target: &str, out: &Path) {
+    println!("Documenting stage{} error index ({})", stage, target);
+    t!(fs::create_dir_all(out));
+    let compiler = Compiler::new(stage, &build.config.build);
+    let mut index = build.tool_cmd(&compiler, "error_index_generator");
+    index.arg("html");
+    index.arg(out.join("error-index.html"));
+
+    // FIXME: shouldn't have to pass this env var
+    index.env("CFG_BUILD", &build.config.build);
+
+    build.run(&mut index);
+}
diff --git a/src/bootstrap/flags.rs b/src/bootstrap/flags.rs
new file mode 100644 (file)
index 0000000..d925997
--- /dev/null
@@ -0,0 +1,103 @@
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Command-line interface of the rustbuild build system.
+//!
+//! This module implements the command-line parsing of the build system which
+//! has various flags to configure how it's run.
+
+use std::fs;
+use std::path::PathBuf;
+use std::process;
+use std::slice;
+
+use getopts::Options;
+
+/// Deserialized version of all flags for this compile.
+pub struct Flags {
+    pub verbose: bool,            // -v/--verbose
+    pub stage: Option<u32>,       // --stage N
+    pub build: String,            // --build triple (required)
+    pub host: Filter,             // --host triples (empty = all)
+    pub target: Filter,           // --target triples (empty = all)
+    pub step: Vec<String>,        // -s/--step, may repeat
+    pub config: Option<PathBuf>,  // --config, defaults to ./config.toml if present
+    pub src: Option<PathBuf>,     // --src repo root
+    pub jobs: Option<u32>,        // -j/--jobs
+    pub args: Vec<String>,        // leftover free arguments
+    pub clean: bool,              // --clean
+}
+
+/// A set of explicitly requested values (hosts or targets); an empty set
+/// means "no filter" and matches everything.
+pub struct Filter {
+    values: Vec<String>,
+}
+
+impl Flags {
+    /// Parses the rustbuild command line out of `args`.
+    ///
+    /// Prints usage and exits on `-h`/`--help` or on any parse failure
+    /// (including the required `--build` flag being absent), so on return
+    /// the `Flags` are fully populated.
+    pub fn parse(args: &[String]) -> Flags {
+        let mut opts = Options::new();
+        opts.optflag("v", "verbose", "use verbose output");
+        opts.optopt("", "config", "TOML configuration file for build", "FILE");
+        opts.optmulti("", "host", "host targets to build", "HOST");
+        // reqopt: parsing fails (and we exit via `usage`) if --build is missing.
+        opts.reqopt("", "build", "build target of the stage0 compiler", "BUILD");
+        opts.optmulti("", "target", "targets to build", "TARGET");
+        opts.optmulti("s", "step", "build step to execute", "STEP");
+        opts.optopt("", "stage", "stage to build", "N");
+        opts.optopt("", "src", "path to repo root", "DIR");
+        opts.optopt("j", "jobs", "number of jobs to run in parallel", "JOBS");
+        opts.optflag("", "clean", "clean output directory");
+        opts.optflag("h", "help", "print this help message");
+
+        // Prints the generated usage text and terminates with code `n`.
+        let usage = |n| -> ! {
+            let brief = format!("Usage: rust.py [options]");
+            print!("{}", opts.usage(&brief));
+            process::exit(n);
+        };
+
+        let m = opts.parse(args).unwrap_or_else(|e| {
+            println!("failed to parse options: {}", e);
+            usage(1);
+        });
+        if m.opt_present("h") {
+            usage(0);
+        }
+
+        // --config wins; otherwise fall back to ./config.toml when it exists.
+        let cfg_file = m.opt_str("config").map(PathBuf::from).or_else(|| {
+            if fs::metadata("config.toml").is_ok() {
+                Some(PathBuf::from("config.toml"))
+            } else {
+                None
+            }
+        });
+
+        Flags {
+            verbose: m.opt_present("v"),
+            clean: m.opt_present("clean"),
+            // NOTE(review): `.parse().unwrap()` panics on a non-numeric
+            // --stage/--jobs value rather than printing usage -- confirm
+            // whether that's acceptable for this internal tool.
+            stage: m.opt_str("stage").map(|j| j.parse().unwrap()),
+            build: m.opt_str("build").unwrap(),
+            host: Filter { values: m.opt_strs("host") },
+            target: Filter { values: m.opt_strs("target") },
+            step: m.opt_strs("step"),
+            config: cfg_file,
+            src: m.opt_str("src").map(PathBuf::from),
+            jobs: m.opt_str("jobs").map(|j| j.parse().unwrap()),
+            args: m.free.clone(),
+        }
+    }
+}
+
+impl Filter {
+    /// Returns true when `name` passes the filter: an empty filter matches
+    /// everything, otherwise `name` must be one of the listed values.
+    pub fn contains(&self, name: &str) -> bool {
+        self.values.len() == 0 || self.values.iter().any(|s| s == name)
+    }
+
+    /// Iterates over the explicitly configured values (possibly none).
+    pub fn iter(&self) -> slice::Iter<String> {
+        self.values.iter()
+    }
+}
diff --git a/src/bootstrap/job.rs b/src/bootstrap/job.rs
new file mode 100644 (file)
index 0000000..4558e6f
--- /dev/null
@@ -0,0 +1,111 @@
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Job management on Windows for bootstrapping
+//!
+//! Most of the time when you're running a build system (e.g. make) you expect
+//! Ctrl-C or abnormal termination to actually terminate the entire tree of
+//! process in play, not just the one at the top. This currently works "by
+//! default" on Unix platforms because Ctrl-C actually sends a signal to the
+//! *process group* rather than the parent process, so everything will get torn
+//! down. On Windows, however, this does not happen and Ctrl-C just kills the
+//! parent process.
+//!
+//! To achieve the same semantics on Windows we use Job Objects to ensure that
+//! all processes die at the same time. Job objects have a mode of operation
+//! where when all handles to the object are closed it causes all child
+//! processes associated with the object to be terminated immediately.
+//! Conveniently whenever a process in the job object spawns a new process the
+//! child will be associated with the job object as well. This means if we add
+//! ourselves to the job object we create then everything will get torn down!
+//!
+//! Unfortunately most of the time the build system is actually called from a
+//! python wrapper (which manages things like building the build system) so this
+//! all doesn't quite cut it so far. To go the last mile we duplicate the job
+//! object handle into our parent process (a python process probably) and then
+//! close our own handle. This means that the only handle to the job object
+//! resides in the parent python process, so when python dies the whole build
+//! system dies (as one would probably expect!).
+//!
+//! Note that this module has a #[cfg(windows)] above it as none of this logic
+//! is required on Unix.
+
+extern crate kernel32;
+extern crate winapi;
+
+use std::env;
+use std::io;
+use std::mem;
+
+use self::winapi::*;
+use self::kernel32::*;
+
+pub unsafe fn setup() {
+    // Create a new job object for us to use
+    let job = CreateJobObjectW(0 as *mut _, 0 as *const _);
+    assert!(job != 0 as *mut _, "{}", io::Error::last_os_error());
+
+    // Indicate that when all handles to the job object are gone that all
+    // processes in the object should be killed. Note that this includes our
+    // entire process tree by default because we've added ourselves and our
+    // children will reside in the job by default.
+    let mut info = mem::zeroed::<JOBOBJECT_EXTENDED_LIMIT_INFORMATION>();
+    info.BasicLimitInformation.LimitFlags = JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE;
+    let r = SetInformationJobObject(job,
+                                    JobObjectExtendedLimitInformation,
+                                    &mut info as *mut _ as LPVOID,
+                                    mem::size_of_val(&info) as DWORD);
+    assert!(r != 0, "{}", io::Error::last_os_error());
+
+    // Assign our process to this job object. Note that if this fails, one very
+    // likely reason is that we are ourselves already in a job object! This can
+    // happen on the build bots that we've got for Windows, or if just anyone
+    // else is instrumenting the build. In this case we just bail out
+    // immediately and assume that they take care of it.
+    //
+    // Also note that nested jobs (why this might fail) are supported in recent
+    // versions of Windows, but the version of Windows that our bots are running
+    // at least doesn't support nested job objects.
+    let r = AssignProcessToJobObject(job, GetCurrentProcess());
+    if r == 0 {
+        CloseHandle(job);
+        return
+    }
+
+    // If we've got a parent process (e.g. the python script that called us)
+    // then move ownership of this job object up to them. That way if the python
+    // script is killed (e.g. via ctrl-c) then we'll all be torn down.
+    //
+    // If we don't have a parent (e.g. this was run directly) then we
+    // intentionally leak the job object handle. When our process exits
+    // (normally or abnormally) it will close the handle implicitly, causing all
+    // processes in the job to be cleaned up.
+    let pid = match env::var("BOOTSTRAP_PARENT_ID") {
+        Ok(s) => s,
+        Err(..) => return,
+    };
+
+    let parent = OpenProcess(PROCESS_DUP_HANDLE, FALSE, pid.parse().unwrap());
+    assert!(parent != 0 as *mut _, "{}", io::Error::last_os_error());
+    let mut parent_handle = 0 as *mut _;
+    let r = DuplicateHandle(GetCurrentProcess(), job,
+                            parent, &mut parent_handle,
+                            0, FALSE, DUPLICATE_SAME_ACCESS);
+
+    // If this failed, well at least we tried! An example of DuplicateHandle
+    // failing in the past has been when the wrong python2 package spawned this
+    // build system (e.g. the `python2` package in MSYS instead of
+    // `mingw-w64-x86_64-python2`). Not sure why it failed, but the "failure
+    // mode" here is that we only clean everything up when the build system
+    // dies, not when the python parent does, so not too bad.
+    if r != 0 {
+        CloseHandle(job);
+    }
+}
index ef6184d6ca76cf8fc3dbbb6b40e6dd0452a75098..943271fc8a641665734531b3393b32d4f37d1e5e 100644 (file)
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-//! A small helper library shared between the build system's executables
+//! Implementation of rustbuild, the Rust build system.
 //!
-//! Currently this just has some simple utilities for modifying the dynamic
-//! library lookup path.
+//! This module, and its descendants, are the implementation of the Rust build
+//! system. Most of this build system is backed by Cargo but the outer layer
+//! here serves as the ability to orchestrate calling Cargo, sequencing Cargo
+//! builds, building artifacts like LLVM, etc.
+//!
+//! More documentation can be found in each respective module below.
+
+extern crate build_helper;
+extern crate cmake;
+extern crate filetime;
+extern crate gcc;
+extern crate getopts;
+extern crate md5;
+extern crate num_cpus;
+extern crate rustc_serialize;
+extern crate toml;
 
+use std::cell::RefCell;
+use std::collections::HashMap;
 use std::env;
-use std::ffi::OsString;
-use std::path::PathBuf;
-
-/// Returns the environment variable which the dynamic library lookup path
-/// resides in for this platform.
-pub fn dylib_path_var() -> &'static str {
-    if cfg!(target_os = "windows") {
-        "PATH"
-    } else if cfg!(target_os = "macos") {
-        "DYLD_LIBRARY_PATH"
-    } else {
-        "LD_LIBRARY_PATH"
+use std::fs::{self, File};
+use std::path::{PathBuf, Path};
+use std::process::Command;
+
+use build_helper::{run_silent, output};
+
+use util::{exe, mtime, libdir, add_lib_path};
+
+/// A helper macro to `unwrap` a result except also print out details like:
+///
+/// * The file/line of the panic
+/// * The expression that failed
+/// * The error itself
+///
+/// This is currently used judiciously throughout the build system rather than
+/// using a `Result` with `try!`, but this may change one day...
+macro_rules! t {
+    ($e:expr) => (match $e {
+        Ok(e) => e,
+        Err(e) => panic!("{} failed with {}", stringify!($e), e),
+    })
+}
+
+mod cc;
+mod channel;
+mod check;
+mod clean;
+mod compile;
+mod config;
+mod dist;
+mod doc;
+mod flags;
+mod native;
+mod sanity;
+mod step;
+pub mod util;
+
+#[cfg(windows)]
+mod job;
+
+#[cfg(not(windows))]
+mod job {
+    pub unsafe fn setup() {}
+}
+
+pub use config::Config;
+pub use flags::Flags;
+
+/// A structure representing a Rust compiler.
+///
+/// Each compiler has a `stage` that it is associated with and a `host` that
+/// corresponds to the platform the compiler runs on. This structure is used as
+/// a parameter to many methods below.
+#[derive(Eq, PartialEq, Clone, Copy, Hash, Debug)]
+pub struct Compiler<'a> {
+    stage: u32,
+    host: &'a str,
+}
+
+/// Global configuration for the build system.
+///
+/// This structure transitively contains all configuration for the build system.
+/// All filesystem-encoded configuration is in `config`, all flags are in
+/// `flags`, and then parsed or probed information is listed in the keys below.
+///
+/// This structure is a parameter of almost all methods in the build system,
+/// although most functions are implemented as free functions rather than
+/// methods specifically on this structure itself (to make it easier to
+/// organize).
+pub struct Build {
+    // User-specified configuration via config.toml
+    config: Config,
+
+    // User-specified configuration via CLI flags
+    flags: Flags,
+
+    // Derived properties from the above two configurations
+    cargo: PathBuf,
+    rustc: PathBuf,
+    src: PathBuf,
+    out: PathBuf,
+    release: String,
+    unstable_features: bool,
+    ver_hash: Option<String>,
+    short_ver_hash: Option<String>,
+    ver_date: Option<String>,
+    version: String,
+    package_vers: String,
+    bootstrap_key: String,
+    bootstrap_key_stage0: String,
+
+    // Probed tools at runtime
+    gdb_version: Option<String>,
+    lldb_version: Option<String>,
+    lldb_python_dir: Option<String>,
+
+    // Runtime state filled in later on
+    cc: HashMap<String, (gcc::Tool, Option<PathBuf>)>,
+    cxx: HashMap<String, gcc::Tool>,
+    compiler_rt_built: RefCell<HashMap<String, PathBuf>>,
+}
+
+/// The various "modes" of invoking Cargo.
+///
+/// These entries currently correspond to the various output directories of the
+/// build system, with each mode generating output in a different directory.
+#[derive(Clone, Copy)]
+pub enum Mode {
+    /// This cargo is going to build the standard library, placing output in the
+    /// "stageN-std" directory.
+    Libstd,
+
+    /// This cargo is going to build libtest, placing output in the
+    /// "stageN-test" directory.
+    Libtest,
+
+    /// This cargo is going to build librustc and compiler libraries, placing
+    /// output in the "stageN-rustc" directory.
+    Librustc,
+
+    /// This cargo is going to some build tool, placing output in the
+    /// "stageN-tools" directory.
+    Tool,
+}
+
+impl Build {
+    /// Creates a new set of build configuration from the `flags` on the command
+    /// line and the filesystem `config`.
+    ///
+    /// By default all build output will be placed in the current directory.
+    pub fn new(flags: Flags, config: Config) -> Build {
+        let cwd = t!(env::current_dir());
+        let src = flags.src.clone().unwrap_or(cwd.clone());
+        let out = cwd.join("build");
+
+        let stage0_root = out.join(&config.build).join("stage0/bin");
+        let rustc = match config.rustc {
+            Some(ref s) => PathBuf::from(s),
+            None => stage0_root.join(exe("rustc", &config.build)),
+        };
+        let cargo = match config.cargo {
+            Some(ref s) => PathBuf::from(s),
+            None => stage0_root.join(exe("cargo", &config.build)),
+        };
+
+        Build {
+            flags: flags,
+            config: config,
+            cargo: cargo,
+            rustc: rustc,
+            src: src,
+            out: out,
+
+            release: String::new(),
+            unstable_features: false,
+            ver_hash: None,
+            short_ver_hash: None,
+            ver_date: None,
+            version: String::new(),
+            bootstrap_key: String::new(),
+            bootstrap_key_stage0: String::new(),
+            package_vers: String::new(),
+            cc: HashMap::new(),
+            cxx: HashMap::new(),
+            compiler_rt_built: RefCell::new(HashMap::new()),
+            gdb_version: None,
+            lldb_version: None,
+            lldb_python_dir: None,
+        }
+    }
+
+    /// Executes the entire build, as configured by the flags and configuration.
+    pub fn build(&mut self) {
+        use step::Source::*;
+
+        unsafe {
+            job::setup();
+        }
+
+        if self.flags.clean {
+            return clean::clean(self);
+        }
+
+        self.verbose("finding compilers");
+        cc::find(self);
+        self.verbose("running sanity check");
+        sanity::check(self);
+        self.verbose("collecting channel variables");
+        channel::collect(self);
+        self.verbose("updating submodules");
+        self.update_submodules();
+
+        // The main loop of the build system.
+        //
+        // The `step::all` function returns a topographically sorted list of all
+        // steps that need to be executed as part of this build. Each step has a
+        // corresponding entry in `step.rs` and indicates some unit of work that
+        // needs to be done as part of the build.
+        //
+        // Almost all of these are simple one-liners that shell out to the
+        // corresponding functionality in the extra modules, where more
+        // documentation can be found.
+        for target in step::all(self) {
+            let doc_out = self.out.join(&target.target).join("doc");
+            match target.src {
+                Llvm { _dummy } => {
+                    native::llvm(self, target.target);
+                }
+                CompilerRt { _dummy } => {
+                    native::compiler_rt(self, target.target);
+                }
+                TestHelpers { _dummy } => {
+                    native::test_helpers(self, target.target);
+                }
+                Libstd { compiler } => {
+                    compile::std(self, target.target, &compiler);
+                }
+                Libtest { compiler } => {
+                    compile::test(self, target.target, &compiler);
+                }
+                Librustc { compiler } => {
+                    compile::rustc(self, target.target, &compiler);
+                }
+                LibstdLink { compiler, host } => {
+                    compile::std_link(self, target.target, &compiler, host);
+                }
+                LibtestLink { compiler, host } => {
+                    compile::test_link(self, target.target, &compiler, host);
+                }
+                LibrustcLink { compiler, host } => {
+                    compile::rustc_link(self, target.target, &compiler, host);
+                }
+                Rustc { stage: 0 } => {
+                    // nothing to do...
+                }
+                Rustc { stage } => {
+                    compile::assemble_rustc(self, stage, target.target);
+                }
+                ToolLinkchecker { stage } => {
+                    compile::tool(self, stage, target.target, "linkchecker");
+                }
+                ToolRustbook { stage } => {
+                    compile::tool(self, stage, target.target, "rustbook");
+                }
+                ToolErrorIndex { stage } => {
+                    compile::tool(self, stage, target.target,
+                                  "error_index_generator");
+                }
+                ToolCargoTest { stage } => {
+                    compile::tool(self, stage, target.target, "cargotest");
+                }
+                ToolTidy { stage } => {
+                    compile::tool(self, stage, target.target, "tidy");
+                }
+                ToolCompiletest { stage } => {
+                    compile::tool(self, stage, target.target, "compiletest");
+                }
+                DocBook { stage } => {
+                    doc::rustbook(self, stage, target.target, "book", &doc_out);
+                }
+                DocNomicon { stage } => {
+                    doc::rustbook(self, stage, target.target, "nomicon",
+                                  &doc_out);
+                }
+                DocStyle { stage } => {
+                    doc::rustbook(self, stage, target.target, "style",
+                                  &doc_out);
+                }
+                DocStandalone { stage } => {
+                    doc::standalone(self, stage, target.target, &doc_out);
+                }
+                DocStd { stage } => {
+                    doc::std(self, stage, target.target, &doc_out);
+                }
+                DocTest { stage } => {
+                    doc::test(self, stage, target.target, &doc_out);
+                }
+                DocRustc { stage } => {
+                    doc::rustc(self, stage, target.target, &doc_out);
+                }
+                DocErrorIndex { stage } => {
+                    doc::error_index(self, stage, target.target, &doc_out);
+                }
+
+                CheckLinkcheck { stage } => {
+                    check::linkcheck(self, stage, target.target);
+                }
+                CheckCargoTest { stage } => {
+                    check::cargotest(self, stage, target.target);
+                }
+                CheckTidy { stage } => {
+                    check::tidy(self, stage, target.target);
+                }
+                CheckRPass { compiler } => {
+                    check::compiletest(self, &compiler, target.target,
+                                       "run-pass", "run-pass");
+                }
+                CheckRPassFull { compiler } => {
+                    check::compiletest(self, &compiler, target.target,
+                                       "run-pass", "run-pass-fulldeps");
+                }
+                CheckCFail { compiler } => {
+                    check::compiletest(self, &compiler, target.target,
+                                       "compile-fail", "compile-fail");
+                }
+                CheckCFailFull { compiler } => {
+                    check::compiletest(self, &compiler, target.target,
+                                       "compile-fail", "compile-fail-fulldeps")
+                }
+                CheckPFail { compiler } => {
+                    check::compiletest(self, &compiler, target.target,
+                                       "parse-fail", "parse-fail");
+                }
+                CheckRFail { compiler } => {
+                    check::compiletest(self, &compiler, target.target,
+                                       "run-fail", "run-fail");
+                }
+                CheckRFailFull { compiler } => {
+                    check::compiletest(self, &compiler, target.target,
+                                       "run-fail", "run-fail-fulldeps");
+                }
+                CheckPretty { compiler } => {
+                    check::compiletest(self, &compiler, target.target,
+                                       "pretty", "pretty");
+                }
+                CheckPrettyRPass { compiler } => {
+                    check::compiletest(self, &compiler, target.target,
+                                       "pretty", "run-pass");
+                }
+                CheckPrettyRPassFull { compiler } => {
+                    check::compiletest(self, &compiler, target.target,
+                                       "pretty", "run-pass-fulldeps");
+                }
+                CheckPrettyRFail { compiler } => {
+                    check::compiletest(self, &compiler, target.target,
+                                       "pretty", "run-fail");
+                }
+                CheckPrettyRFailFull { compiler } => {
+                    check::compiletest(self, &compiler, target.target,
+                                       "pretty", "run-fail-fulldeps");
+                }
+                CheckPrettyRPassValgrind { compiler } => {
+                    check::compiletest(self, &compiler, target.target,
+                                       "pretty", "run-pass-valgrind");
+                }
+                CheckCodegen { compiler } => {
+                    check::compiletest(self, &compiler, target.target,
+                                       "codegen", "codegen");
+                }
+                CheckCodegenUnits { compiler } => {
+                    check::compiletest(self, &compiler, target.target,
+                                       "codegen-units", "codegen-units");
+                }
+                CheckIncremental { compiler } => {
+                    check::compiletest(self, &compiler, target.target,
+                                       "incremental", "incremental");
+                }
+                CheckUi { compiler } => {
+                    check::compiletest(self, &compiler, target.target,
+                                       "ui", "ui");
+                }
+                CheckDebuginfo { compiler } => {
+                    if target.target.contains("msvc") {
+                        // nothing to do
+                    } else if target.target.contains("apple") {
+                        check::compiletest(self, &compiler, target.target,
+                                           "debuginfo-lldb", "debuginfo");
+                    } else {
+                        check::compiletest(self, &compiler, target.target,
+                                           "debuginfo-gdb", "debuginfo");
+                    }
+                }
+                CheckRustdoc { compiler } => {
+                    check::compiletest(self, &compiler, target.target,
+                                       "rustdoc", "rustdoc");
+                }
+                CheckRPassValgrind { compiler } => {
+                    check::compiletest(self, &compiler, target.target,
+                                       "run-pass-valgrind", "run-pass-valgrind");
+                }
+                CheckDocs { compiler } => {
+                    check::docs(self, &compiler);
+                }
+                CheckErrorIndex { compiler } => {
+                    check::error_index(self, &compiler);
+                }
+                CheckRMake { compiler } => {
+                    check::compiletest(self, &compiler, target.target,
+                                       "run-make", "run-make")
+                }
+                CheckCrateStd { compiler } => {
+                    check::krate(self, &compiler, target.target, Mode::Libstd)
+                }
+                CheckCrateTest { compiler } => {
+                    check::krate(self, &compiler, target.target, Mode::Libtest)
+                }
+                CheckCrateRustc { compiler } => {
+                    check::krate(self, &compiler, target.target, Mode::Librustc)
+                }
+
+                DistDocs { stage } => dist::docs(self, stage, target.target),
+                DistMingw { _dummy } => dist::mingw(self, target.target),
+                DistRustc { stage } => dist::rustc(self, stage, target.target),
+                DistStd { compiler } => dist::std(self, &compiler, target.target),
+
+                DebuggerScripts { stage } => {
+                    let compiler = Compiler::new(stage, target.target);
+                    dist::debugger_scripts(self,
+                                           &self.sysroot(&compiler),
+                                           target.target);
+                }
+
+                AndroidCopyLibs { compiler } => {
+                    check::android_copy_libs(self, &compiler, target.target);
+                }
+
+                // pseudo-steps
+                Dist { .. } |
+                Doc { .. } |
+                CheckTarget { .. } |
+                Check { .. } => {}
+            }
+        }
+    }
+
+    /// Updates all git submodules that we have.
+    ///
+    /// This will detect if any submodules are out of date and run the necessary
+    /// commands to sync them all with upstream.
+    fn update_submodules(&self) {
+        if !self.config.submodules {
+            return
+        }
+        if fs::metadata(self.src.join(".git")).is_err() {
+            return
+        }
+        let git_submodule = || {
+            let mut cmd = Command::new("git");
+            cmd.current_dir(&self.src).arg("submodule");
+            return cmd
+        };
+
+        // FIXME: this takes a seriously long time to execute on Windows and a
+        //        nontrivial amount of time on Unix, we should have a better way
+        //        of detecting whether we need to run all the submodule commands
+        //        below.
+        let out = output(git_submodule().arg("status"));
+        if !out.lines().any(|l| l.starts_with("+") || l.starts_with("-")) {
+            return
+        }
+
+        self.run(git_submodule().arg("sync"));
+        self.run(git_submodule().arg("init"));
+        self.run(git_submodule().arg("update"));
+        self.run(git_submodule().arg("update").arg("--recursive"));
+        self.run(git_submodule().arg("status").arg("--recursive"));
+        self.run(git_submodule().arg("foreach").arg("--recursive")
+                                .arg("git").arg("clean").arg("-fdx"));
+        self.run(git_submodule().arg("foreach").arg("--recursive")
+                                .arg("git").arg("checkout").arg("."));
+    }
+
+    /// Clear out `dir` if `input` is newer.
+    ///
+    /// After this executes, it will also ensure that `dir` exists.
+    fn clear_if_dirty(&self, dir: &Path, input: &Path) {
+        let stamp = dir.join(".stamp");
+        if mtime(&stamp) < mtime(input) {
+            self.verbose(&format!("Dirty - {}", dir.display()));
+            let _ = fs::remove_dir_all(dir);
+        }
+        t!(fs::create_dir_all(dir));
+        t!(File::create(stamp));
+    }
+
+    /// Prepares an invocation of `cargo` to be run.
+    ///
+    /// This will create a `Command` that represents a pending execution of
+    /// Cargo. This cargo will be configured to use `compiler` as the actual
+    /// rustc compiler, its output will be scoped by `mode`'s output directory,
+    /// it will pass the `--target` flag for the specified `target`, and will be
+    /// executing the Cargo command `cmd`.
+    fn cargo(&self,
+             compiler: &Compiler,
+             mode: Mode,
+             target: &str,
+             cmd: &str) -> Command {
+        let mut cargo = Command::new(&self.cargo);
+        let out_dir = self.stage_out(compiler, mode);
+        cargo.env("CARGO_TARGET_DIR", out_dir)
+             .arg(cmd)
+             .arg("-j").arg(self.jobs().to_string())
+             .arg("--target").arg(target);
+
+        let stage;
+        if compiler.stage == 0 && self.config.local_rebuild {
+            // Assume the local-rebuild rustc already has stage1 features.
+            stage = 1;
+        } else {
+            stage = compiler.stage;
+        }
+
+        // Customize the compiler we're running. Specify the compiler to cargo
+        // as our shim and then pass it some various options used to configure
+        // how the actual compiler itself is called.
+        //
+        // These variables are primarily all read by
+        // src/bootstrap/{rustc,rustdoc.rs}
+        cargo.env("RUSTC", self.out.join("bootstrap/debug/rustc"))
+             .env("RUSTC_REAL", self.compiler_path(compiler))
+             .env("RUSTC_STAGE", stage.to_string())
+             .env("RUSTC_DEBUGINFO", self.config.rust_debuginfo.to_string())
+             .env("RUSTC_CODEGEN_UNITS",
+                  self.config.rust_codegen_units.to_string())
+             .env("RUSTC_DEBUG_ASSERTIONS",
+                  self.config.rust_debug_assertions.to_string())
+             .env("RUSTC_SNAPSHOT", &self.rustc)
+             .env("RUSTC_SYSROOT", self.sysroot(compiler))
+             .env("RUSTC_LIBDIR", self.rustc_libdir(compiler))
+             .env("RUSTC_SNAPSHOT_LIBDIR", self.rustc_snapshot_libdir())
+             .env("RUSTC_RPATH", self.config.rust_rpath.to_string())
+             .env("RUSTDOC", self.out.join("bootstrap/debug/rustdoc"))
+             .env("RUSTDOC_REAL", self.rustdoc(compiler))
+             .env("RUSTC_FLAGS", self.rustc_flags(target).join(" "));
+
+        self.add_bootstrap_key(compiler, &mut cargo);
+
+        // Specify some various options for build scripts used throughout
+        // the build.
+        //
+        // FIXME: the guard against msvc shouldn't need to be here
+        if !target.contains("msvc") {
+            cargo.env(format!("CC_{}", target), self.cc(target))
+                 .env(format!("AR_{}", target), self.ar(target).unwrap()) // only msvc is None
+                 .env(format!("CFLAGS_{}", target), self.cflags(target).join(" "));
+        }
+
+        // If we're building for OSX, inform the compiler and the linker that
+        // we want to build a compiler runnable on 10.7
+        if target.contains("apple-darwin") {
+            cargo.env("MACOSX_DEPLOYMENT_TARGET", "10.7");
+        }
+
+        // Environment variables *required* throughout the build
+        //
+        // FIXME: should update code to not require this env var
+        cargo.env("CFG_COMPILER_HOST_TRIPLE", target);
+
+        if self.config.verbose || self.flags.verbose {
+            cargo.arg("-v");
+        }
+        if self.config.rust_optimize {
+            cargo.arg("--release");
+        }
+        return cargo
+    }
+
+    /// Get a path to the compiler specified.
+    fn compiler_path(&self, compiler: &Compiler) -> PathBuf {
+        if compiler.is_snapshot(self) {
+            self.rustc.clone()
+        } else {
+            self.sysroot(compiler).join("bin").join(exe("rustc", compiler.host))
+        }
+    }
+
+    /// Get the specified tool built by the specified compiler
+    fn tool(&self, compiler: &Compiler, tool: &str) -> PathBuf {
+        self.cargo_out(compiler, Mode::Tool, compiler.host)
+            .join(exe(tool, compiler.host))
+    }
+
+    /// Get the `rustdoc` executable next to the specified compiler
+    fn rustdoc(&self, compiler: &Compiler) -> PathBuf {
+        let mut rustdoc = self.compiler_path(compiler);
+        rustdoc.pop();
+        rustdoc.push(exe("rustdoc", compiler.host));
+        return rustdoc
+    }
+
+    /// Get a `Command` which is ready to run `tool` in `stage` built for
+    /// `host`.
+    fn tool_cmd(&self, compiler: &Compiler, tool: &str) -> Command {
+        let mut cmd = Command::new(self.tool(&compiler, tool));
+        let host = compiler.host;
+        let paths = vec![
+            self.cargo_out(compiler, Mode::Libstd, host).join("deps"),
+            self.cargo_out(compiler, Mode::Libtest, host).join("deps"),
+            self.cargo_out(compiler, Mode::Librustc, host).join("deps"),
+            self.cargo_out(compiler, Mode::Tool, host).join("deps"),
+        ];
+        add_lib_path(paths, &mut cmd);
+        return cmd
+    }
+
+    /// Get the space-separated set of activated features for the standard
+    /// library.
+    fn std_features(&self) -> String {
+        let mut features = String::new();
+        if self.config.debug_jemalloc {
+            features.push_str(" debug-jemalloc");
+        }
+        if self.config.use_jemalloc {
+            features.push_str(" jemalloc");
+        }
+        return features
+    }
+
+    /// Get the space-separated set of activated features for the compiler.
+    fn rustc_features(&self) -> String {
+        let mut features = String::new();
+        if self.config.use_jemalloc {
+            features.push_str(" jemalloc");
+        }
+        return features
+    }
+
+    /// Component directory that Cargo will produce output into (e.g.
+    /// release/debug)
+    fn cargo_dir(&self) -> &'static str {
+        if self.config.rust_optimize {"release"} else {"debug"}
+    }
+
+    /// Returns the sysroot for the `compiler` specified that *this build system
+    /// generates*.
+    ///
+    /// That is, the sysroot for the stage0 compiler is not what the compiler
+    /// thinks it is by default, but it's the same as the default for stages
+    /// 1-3.
+    fn sysroot(&self, compiler: &Compiler) -> PathBuf {
+        if compiler.stage == 0 {
+            self.out.join(compiler.host).join("stage0-sysroot")
+        } else {
+            self.out.join(compiler.host).join(format!("stage{}", compiler.stage))
+        }
+    }
+
+    /// Returns the libdir where the standard library and other artifacts are
+    /// found for a compiler's sysroot.
+    fn sysroot_libdir(&self, compiler: &Compiler, target: &str) -> PathBuf {
+        self.sysroot(compiler).join("lib").join("rustlib")
+            .join(target).join("lib")
+    }
+
+    /// Returns the root directory for all output generated in a particular
+    /// stage when running with a particular host compiler.
+    ///
+    /// The mode indicates what the root directory is for.
+    fn stage_out(&self, compiler: &Compiler, mode: Mode) -> PathBuf {
+        let suffix = match mode {
+            Mode::Libstd => "-std",
+            Mode::Libtest => "-test",
+            Mode::Tool => "-tools",
+            Mode::Librustc => "-rustc",
+        };
+        self.out.join(compiler.host)
+                .join(format!("stage{}{}", compiler.stage, suffix))
+    }
+
+    /// Returns the root output directory for all Cargo output in a given stage,
+    /// running a particular compiler, whether or not we're building the
+    /// standard library, and targeting the specified architecture.
+    fn cargo_out(&self,
+                 compiler: &Compiler,
+                 mode: Mode,
+                 target: &str) -> PathBuf {
+        self.stage_out(compiler, mode).join(target).join(self.cargo_dir())
+    }
+
+    /// Root output directory for LLVM compiled for `target`
+    ///
+    /// Note that if LLVM is configured externally then the directory returned
+    /// will likely be empty.
+    fn llvm_out(&self, target: &str) -> PathBuf {
+        self.out.join(target).join("llvm")
+    }
+
+    /// Returns the path to `llvm-config` for the specified target.
+    ///
+    /// If a custom `llvm-config` was specified for target then that's returned
+    /// instead.
+    fn llvm_config(&self, target: &str) -> PathBuf {
+        let target_config = self.config.target_config.get(target);
+        if let Some(s) = target_config.and_then(|c| c.llvm_config.as_ref()) {
+            s.clone()
+        } else {
+            self.llvm_out(&self.config.build).join("bin")
+                .join(exe("llvm-config", target))
+        }
+    }
+
+    /// Returns the path to `FileCheck` binary for the specified target
+    fn llvm_filecheck(&self, target: &str) -> PathBuf {
+        let target_config = self.config.target_config.get(target);
+        if let Some(s) = target_config.and_then(|c| c.llvm_config.as_ref()) {
+            s.parent().unwrap().join(exe("FileCheck", target))
+        } else {
+            let base = self.llvm_out(&self.config.build).join("build");
+            let exe = exe("FileCheck", target);
+            if self.config.build.contains("msvc") {
+                base.join("Release/bin").join(exe)
+            } else {
+                base.join("bin").join(exe)
+            }
+        }
+    }
+
+    /// Root output directory for compiler-rt compiled for `target`
+    fn compiler_rt_out(&self, target: &str) -> PathBuf {
+        self.out.join(target).join("compiler-rt")
+    }
+
+    /// Root output directory for rust_test_helpers library compiled for
+    /// `target`
+    fn test_helpers_out(&self, target: &str) -> PathBuf {
+        self.out.join(target).join("rust-test-helpers")
+    }
+
+    /// Adds the compiler's directory of dynamic libraries to `cmd`'s dynamic
+    /// library lookup path.
+    fn add_rustc_lib_path(&self, compiler: &Compiler, cmd: &mut Command) {
+        // Windows doesn't need dylib path munging because the dlls for the
+        // compiler live next to the compiler and the system will find them
+        // automatically.
+        if cfg!(windows) {
+            return
+        }
+
+        add_lib_path(vec![self.rustc_libdir(compiler)], cmd);
+    }
+
+    /// Adds the compiler's bootstrap key to the environment of `cmd`.
+    fn add_bootstrap_key(&self, compiler: &Compiler, cmd: &mut Command) {
+        // In stage0 we're using a previously released stable compiler, so we
+        // use the stage0 bootstrap key. Otherwise we use our own build's
+        // bootstrap key.
+        let bootstrap_key = if compiler.is_snapshot(self) && !self.config.local_rebuild {
+            &self.bootstrap_key_stage0
+        } else {
+            &self.bootstrap_key
+        };
+        cmd.env("RUSTC_BOOTSTRAP_KEY", bootstrap_key);
+    }
+
+    /// Returns the compiler's libdir where it stores the dynamic libraries that
+    /// it itself links against.
+    ///
+    /// For example this returns `<sysroot>/lib` on Unix and `<sysroot>/bin` on
+    /// Windows.
+    fn rustc_libdir(&self, compiler: &Compiler) -> PathBuf {
+        if compiler.is_snapshot(self) {
+            self.rustc_snapshot_libdir()
+        } else {
+            self.sysroot(compiler).join(libdir(compiler.host))
+        }
+    }
+
+    /// Returns the libdir of the snapshot compiler.
+    fn rustc_snapshot_libdir(&self) -> PathBuf {
+        self.rustc.parent().unwrap().parent().unwrap()
+            .join(libdir(&self.config.build))
+    }
+
+    /// Runs a command, printing out nice contextual information if it fails.
+    fn run(&self, cmd: &mut Command) {
+        self.verbose(&format!("running: {:?}", cmd));
+        run_silent(cmd)
+    }
+
+    /// Prints a message if this build is configured in verbose mode.
+    fn verbose(&self, msg: &str) {
+        if self.flags.verbose || self.config.verbose {
+            println!("{}", msg);
+        }
+    }
+
+    /// Returns the number of parallel jobs that have been configured for this
+    /// build.
+    fn jobs(&self) -> u32 {
+        self.flags.jobs.unwrap_or(num_cpus::get() as u32)
+    }
+
+    /// Returns the path to the C compiler for the target specified.
+    fn cc(&self, target: &str) -> &Path {
+        self.cc[target].0.path()
+    }
+
+    /// Returns a list of flags to pass to the C compiler for the target
+    /// specified.
+    fn cflags(&self, target: &str) -> Vec<String> {
+        // Filter out -O and /O (the optimization flags) that we picked up from
+        // gcc-rs because the build scripts will determine that for themselves.
+        let mut base = self.cc[target].0.args().iter()
+                           .map(|s| s.to_string_lossy().into_owned())
+                           .filter(|s| !s.starts_with("-O") && !s.starts_with("/O"))
+                           .collect::<Vec<_>>();
+
+        // If we're compiling on OSX then we add a few unconditional flags
+        // indicating that we want libc++ (more filled out than libstdc++) and
+        // we want to compile for 10.7. This way we can ensure that
+        // LLVM/jemalloc/etc are all properly compiled.
+        if target.contains("apple-darwin") {
+            base.push("-stdlib=libc++".into());
+            base.push("-mmacosx-version-min=10.7".into());
+        }
+        return base
+    }
+
+    /// Returns the path to the `ar` archive utility for the target specified.
+    fn ar(&self, target: &str) -> Option<&Path> {
+        self.cc[target].1.as_ref().map(|p| &**p)
+    }
+
+    /// Returns the path to the C++ compiler for the target specified, may panic
+    /// if no C++ compiler was configured for the target.
+    fn cxx(&self, target: &str) -> &Path {
+        self.cxx[target].path()
+    }
+
+    /// Returns flags to pass to the compiler to generate code for `target`.
+    fn rustc_flags(&self, target: &str) -> Vec<String> {
+        // New flags should be added here with great caution!
+        //
+        // It's quite unfortunate to **require** flags to generate code for a
+        // target, so it should only be passed here if absolutely necessary!
+        // Most default configuration should be done through target specs rather
+        // than an entry here.
+
+        let mut base = Vec::new();
+        if target != self.config.build && !target.contains("msvc") {
+            base.push(format!("-Clinker={}", self.cc(target).display()));
+        }
+        return base
     }
 }
 
-/// Parses the `dylib_path_var()` environment variable, returning a list of
-/// paths that are members of this lookup path.
-pub fn dylib_path() -> Vec<PathBuf> {
-    env::split_paths(&env::var_os(dylib_path_var()).unwrap_or(OsString::new()))
-        .collect()
+impl<'a> Compiler<'a> {
+    /// Creates a new compiler for the specified stage/host
+    fn new(stage: u32, host: &'a str) -> Compiler<'a> {
+        Compiler { stage: stage, host: host }
+    }
+
+    /// Returns whether this is a snapshot compiler for `build`'s configuration
+    fn is_snapshot(&self, build: &Build) -> bool {
+        self.stage == 0 && self.host == build.config.build
+    }
 }
diff --git a/src/bootstrap/main.rs b/src/bootstrap/main.rs
deleted file mode 100644 (file)
index 18d03b5..0000000
+++ /dev/null
@@ -1,49 +0,0 @@
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! rustbuild, the Rust build system
-//!
-//! This is the entry point for the build system used to compile the `rustc`
-//! compiler. Lots of documentation can be found in the `README.md` file next to
-//! this file, and otherwise documentation can be found throughout the `build`
-//! directory in each respective module.
-
-#![deny(warnings)]
-
-extern crate bootstrap;
-extern crate build_helper;
-extern crate cmake;
-extern crate filetime;
-extern crate gcc;
-extern crate getopts;
-extern crate libc;
-extern crate num_cpus;
-extern crate rustc_serialize;
-extern crate toml;
-extern crate md5;
-
-use std::env;
-
-use build::{Flags, Config, Build};
-
-mod build;
-
-fn main() {
-    let args = env::args().skip(1).collect::<Vec<_>>();
-    let flags = Flags::parse(&args);
-    let mut config = Config::parse(&flags.build, flags.config.clone());
-
-    // compat with `./configure` while we're still using that
-    if std::fs::metadata("config.mk").is_ok() {
-        config.update_with_config_mk();
-    }
-
-    Build::new(flags, config).build();
-}
diff --git a/src/bootstrap/native.rs b/src/bootstrap/native.rs
new file mode 100644 (file)
index 0000000..83e9393
--- /dev/null
@@ -0,0 +1,238 @@
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Compilation of native dependencies like LLVM.
+//!
+//! Native projects like LLVM unfortunately aren't suited just yet for
+//! compilation in build scripts that Cargo has. This is because the
+//! compilation takes a *very* long time but also because we don't want to
+//! compile LLVM 3 times as part of a normal bootstrap (we want it cached).
+//!
+//! LLVM and compiler-rt are essentially just wired up to everything else to
+//! ensure that they're always in place if needed.
+
+use std::path::Path;
+use std::process::Command;
+use std::fs::{self, File};
+
+use build_helper::output;
+use cmake;
+use gcc;
+
+use Build;
+use util::{staticlib, up_to_date};
+
+/// Compile LLVM for `target`.
+pub fn llvm(build: &Build, target: &str) {
+    // If we're using a custom LLVM bail out here, but we can only use a
+    // custom LLVM for the build triple.
+    if let Some(config) = build.config.target_config.get(target) {
+        if let Some(ref s) = config.llvm_config {
+            return check_llvm_version(build, s);
+        }
+    }
+
+    // If the cleaning trigger is newer than our built artifacts (or if the
+    // artifacts are missing) then we keep going, otherwise we bail out.
+    let dst = build.llvm_out(target);
+    let stamp = build.src.join("src/rustllvm/llvm-auto-clean-trigger");
+    let done_stamp = dst.join("llvm-finished-building");
+    build.clear_if_dirty(&dst, &stamp);
+    if fs::metadata(&done_stamp).is_ok() {
+        return
+    }
+
+    println!("Building LLVM for {}", target);
+
+    let _ = fs::remove_dir_all(&dst.join("build"));
+    t!(fs::create_dir_all(&dst.join("build")));
+    let assertions = if build.config.llvm_assertions {"ON"} else {"OFF"};
+
+    // http://llvm.org/docs/CMake.html
+    let mut cfg = cmake::Config::new(build.src.join("src/llvm"));
+    if build.config.ninja {
+        cfg.generator("Ninja");
+    }
+    cfg.target(target)
+       .host(&build.config.build)
+       .out_dir(&dst)
+       .profile(if build.config.llvm_optimize {"Release"} else {"Debug"})
+       .define("LLVM_ENABLE_ASSERTIONS", assertions)
+       .define("LLVM_TARGETS_TO_BUILD", "X86;ARM;AArch64;Mips;PowerPC")
+       .define("LLVM_INCLUDE_EXAMPLES", "OFF")
+       .define("LLVM_INCLUDE_TESTS", "OFF")
+       .define("LLVM_INCLUDE_DOCS", "OFF")
+       .define("LLVM_ENABLE_ZLIB", "OFF")
+       .define("WITH_POLLY", "OFF")
+       .define("LLVM_ENABLE_TERMINFO", "OFF")
+       .define("LLVM_ENABLE_LIBEDIT", "OFF")
+       .define("LLVM_PARALLEL_COMPILE_JOBS", build.jobs().to_string());
+
+    if target.starts_with("i686") {
+        cfg.define("LLVM_BUILD_32_BITS", "ON");
+    }
+
+    // http://llvm.org/docs/HowToCrossCompileLLVM.html
+    if target != build.config.build {
+        // FIXME: if the llvm root for the build triple is overridden then we
+        //        should use llvm-tblgen from there, also should verify that it
+        //        actually exists most of the time in normal installs of LLVM.
+        let host = build.llvm_out(&build.config.build).join("bin/llvm-tblgen");
+        cfg.define("CMAKE_CROSSCOMPILING", "True")
+           .define("LLVM_TARGET_ARCH", target.split('-').next().unwrap())
+           .define("LLVM_TABLEGEN", &host)
+           .define("LLVM_DEFAULT_TARGET_TRIPLE", target);
+    }
+
+    // MSVC handles compiler business itself
+    if !target.contains("msvc") {
+        if build.config.ccache {
+           cfg.define("CMAKE_C_COMPILER", "ccache")
+              .define("CMAKE_C_COMPILER_ARG1", build.cc(target))
+              .define("CMAKE_CXX_COMPILER", "ccache")
+              .define("CMAKE_CXX_COMPILER_ARG1", build.cxx(target));
+        } else {
+           cfg.define("CMAKE_C_COMPILER", build.cc(target))
+              .define("CMAKE_CXX_COMPILER", build.cxx(target));
+        }
+        cfg.build_arg("-j").build_arg(build.jobs().to_string());
+
+        cfg.define("CMAKE_C_FLAGS", build.cflags(target).join(" "));
+        cfg.define("CMAKE_CXX_FLAGS", build.cflags(target).join(" "));
+    }
+
+    // FIXME: we don't actually need to build all LLVM tools and all LLVM
+    //        libraries here, e.g. we just want a few components and a few
+    //        tools. Figure out how to filter them down and only build the right
+    //        tools and libs on all platforms.
+    cfg.build();
+
+    t!(File::create(&done_stamp));
+}
+
+fn check_llvm_version(build: &Build, llvm_config: &Path) {
+    if !build.config.llvm_version_check {
+        return
+    }
+
+    let mut cmd = Command::new(llvm_config);
+    let version = output(cmd.arg("--version"));
+    if version.starts_with("3.5") || version.starts_with("3.6") ||
+       version.starts_with("3.7") {
+        return
+    }
+    panic!("\n\nbad LLVM version: {}, need >=3.5\n\n", version)
+}
+
+/// Compiles the `compiler-rt` library, or at least the builtins part of it.
+///
+/// This uses the CMake build system and an existing LLVM build directory to
+/// compile the project.
+pub fn compiler_rt(build: &Build, target: &str) {
+    let dst = build.compiler_rt_out(target);
+    let arch = target.split('-').next().unwrap();
+    let mode = if build.config.rust_optimize {"Release"} else {"Debug"};
+
+    let build_llvm_config = build.llvm_config(&build.config.build);
+    let mut cfg = cmake::Config::new(build.src.join("src/compiler-rt"));
+    cfg.target(target)
+       .host(&build.config.build)
+       .out_dir(&dst)
+       .profile(mode)
+       .define("LLVM_CONFIG_PATH", build_llvm_config)
+       .define("COMPILER_RT_DEFAULT_TARGET_TRIPLE", target)
+       .define("COMPILER_RT_BUILD_SANITIZERS", "OFF")
+       .define("COMPILER_RT_BUILD_EMUTLS", "OFF")
+       // inform about c/c++ compilers, the c++ compiler isn't actually used but
+       // it's needed to get the initial configure to work on all platforms.
+       .define("CMAKE_C_COMPILER", build.cc(target))
+       .define("CMAKE_CXX_COMPILER", build.cc(target));
+
+    let (dir, build_target, libname) = if target.contains("linux") ||
+                                          target.contains("freebsd") ||
+                                          target.contains("netbsd") {
+        let os_extra = if target.contains("android") && target.contains("arm") {
+            "-android"
+        } else {
+            ""
+        };
+        let builtins_arch = match arch {
+            "i586" => "i386",
+            "arm" | "armv7" if target.contains("android") => "armhf",
+            "arm" if target.contains("eabihf") => "armhf",
+            _ => arch,
+        };
+        let target = format!("clang_rt.builtins-{}", builtins_arch);
+        ("linux".to_string(),
+         target.clone(),
+         format!("{}{}", target, os_extra))
+    } else if target.contains("apple-darwin") {
+        let builtins_arch = match arch {
+            "i686" => "i386",
+            _ => arch,
+        };
+        let target = format!("clang_rt.builtins_{}_osx", builtins_arch);
+        ("builtins".to_string(), target.clone(), target)
+    } else if target.contains("apple-ios") {
+        cfg.define("COMPILER_RT_ENABLE_IOS", "ON");
+        let target = match arch {
+            "armv7s" => "hard_pic_armv7em_macho_embedded".to_string(),
+            "aarch64" => "builtins_arm64_ios".to_string(),
+            _ => format!("hard_pic_{}_macho_embedded", arch),
+        };
+        ("builtins".to_string(), target.clone(), target)
+    } else if target.contains("windows-gnu") {
+        let target = format!("clang_rt.builtins-{}", arch);
+        ("windows".to_string(), target.clone(), target)
+    } else if target.contains("windows-msvc") {
+        let builtins_arch = match arch {
+            "i586" | "i686" => "i386",
+            _ => arch,
+        };
+        (format!("windows/{}", mode),
+         "lib/builtins/builtins".to_string(),
+         format!("clang_rt.builtins-{}", builtins_arch))
+    } else {
+        panic!("can't get os from target: {}", target)
+    };
+    let output = dst.join("build/lib").join(dir)
+                    .join(staticlib(&libname, target));
+    build.compiler_rt_built.borrow_mut().insert(target.to_string(),
+                                                output.clone());
+    if fs::metadata(&output).is_ok() {
+        return
+    }
+    let _ = fs::remove_dir_all(&dst);
+    t!(fs::create_dir_all(&dst));
+    cfg.build_target(&build_target);
+    cfg.build();
+}
+
+/// Compiles the `rust_test_helpers.c` library which we used in various
+/// `run-pass` test suites for ABI testing.
+pub fn test_helpers(build: &Build, target: &str) {
+    let dst = build.test_helpers_out(target);
+    let src = build.src.join("src/rt/rust_test_helpers.c");
+    if up_to_date(&src, &dst.join("librust_test_helpers.a")) {
+        return
+    }
+
+    println!("Building test helpers");
+    t!(fs::create_dir_all(&dst));
+    let mut cfg = gcc::Config::new();
+    cfg.cargo_metadata(false)
+       .out_dir(&dst)
+       .target(target)
+       .host(&build.config.build)
+       .opt_level(0)
+       .debug(false)
+       .file(build.src.join("src/rt/rust_test_helpers.c"))
+       .compile("librust_test_helpers.a");
+}
diff --git a/src/bootstrap/rustc.rs b/src/bootstrap/rustc.rs
deleted file mode 100644 (file)
index 97deced..0000000
+++ /dev/null
@@ -1,164 +0,0 @@
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Shim which is passed to Cargo as "rustc" when running the bootstrap.
-//!
-//! This shim will take care of some various tasks that our build process
-//! requires that Cargo can't quite do through normal configuration:
-//!
-//! 1. When compiling build scripts and build dependencies, we need a guaranteed
-//!    full standard library available. The only compiler which actually has
-//!    this is the snapshot, so we detect this situation and always compile with
-//!    the snapshot compiler.
-//! 2. We pass a bunch of `--cfg` and other flags based on what we're compiling
-//!    (and this slightly differs based on a whether we're using a snapshot or
-//!    not), so we do that all here.
-//!
-//! This may one day be replaced by RUSTFLAGS, but the dynamic nature of
-//! switching compilers for the bootstrap and for build scripts will probably
-//! never get replaced.
-
-extern crate bootstrap;
-
-use std::env;
-use std::ffi::OsString;
-use std::path::PathBuf;
-use std::process::Command;
-
-fn main() {
-    let args = env::args_os().skip(1).collect::<Vec<_>>();
-    // Detect whether or not we're a build script depending on whether --target
-    // is passed (a bit janky...)
-    let target = args.windows(2).find(|w| &*w[0] == "--target")
-                                .and_then(|w| w[1].to_str());
-
-    // Build scripts always use the snapshot compiler which is guaranteed to be
-    // able to produce an executable, whereas intermediate compilers may not
-    // have the standard library built yet and may not be able to produce an
-    // executable. Otherwise we just use the standard compiler we're
-    // bootstrapping with.
-    let (rustc, libdir) = if target.is_none() {
-        ("RUSTC_SNAPSHOT", "RUSTC_SNAPSHOT_LIBDIR")
-    } else {
-        ("RUSTC_REAL", "RUSTC_LIBDIR")
-    };
-    let stage = env::var("RUSTC_STAGE").unwrap();
-
-    let rustc = env::var_os(rustc).unwrap();
-    let libdir = env::var_os(libdir).unwrap();
-    let mut dylib_path = bootstrap::dylib_path();
-    dylib_path.insert(0, PathBuf::from(libdir));
-
-    let mut cmd = Command::new(rustc);
-    cmd.args(&args)
-       .arg("--cfg").arg(format!("stage{}", stage))
-       .env(bootstrap::dylib_path_var(), env::join_paths(&dylib_path).unwrap());
-
-    if let Some(target) = target {
-        // The stage0 compiler has a special sysroot distinct from what we
-        // actually downloaded, so we just always pass the `--sysroot` option.
-        cmd.arg("--sysroot").arg(env::var_os("RUSTC_SYSROOT").unwrap());
-
-        // When we build Rust dylibs they're all intended for intermediate
-        // usage, so make sure we pass the -Cprefer-dynamic flag instead of
-        // linking all deps statically into the dylib.
-        cmd.arg("-Cprefer-dynamic");
-
-        // Help the libc crate compile by assisting it in finding the MUSL
-        // native libraries.
-        if let Some(s) = env::var_os("MUSL_ROOT") {
-            let mut root = OsString::from("native=");
-            root.push(&s);
-            root.push("/lib");
-            cmd.arg("-L").arg(&root);
-        }
-
-        // Pass down extra flags, commonly used to configure `-Clinker` when
-        // cross compiling.
-        if let Ok(s) = env::var("RUSTC_FLAGS") {
-            cmd.args(&s.split(" ").filter(|s| !s.is_empty()).collect::<Vec<_>>());
-        }
-
-        // If we're compiling specifically the `panic_abort` crate then we pass
-        // the `-C panic=abort` option. Note that we do not do this for any
-        // other crate intentionally as this is the only crate for now that we
-        // ship with panic=abort.
-        //
-        // This... is a bit of a hack how we detect this. Ideally this
-        // information should be encoded in the crate I guess? Would likely
-        // require an RFC amendment to RFC 1513, however.
-        let is_panic_abort = args.windows(2).any(|a| {
-            &*a[0] == "--crate-name" && &*a[1] == "panic_abort"
-        });
-        // FIXME(stage0): remove this `stage != "0"` condition
-        if is_panic_abort && stage != "0" {
-            cmd.arg("-C").arg("panic=abort");
-        }
-
-        // Set various options from config.toml to configure how we're building
-        // code.
-        if env::var("RUSTC_DEBUGINFO") == Ok("true".to_string()) {
-            cmd.arg("-g");
-        }
-        let debug_assertions = match env::var("RUSTC_DEBUG_ASSERTIONS") {
-            Ok(s) => if s == "true" {"y"} else {"n"},
-            Err(..) => "n",
-        };
-        cmd.arg("-C").arg(format!("debug-assertions={}", debug_assertions));
-        if let Ok(s) = env::var("RUSTC_CODEGEN_UNITS") {
-            cmd.arg("-C").arg(format!("codegen-units={}", s));
-        }
-
-        // Dealing with rpath here is a little special, so let's go into some
-        // detail. First off, `-rpath` is a linker option on Unix platforms
-        // which adds to the runtime dynamic loader path when looking for
-        // dynamic libraries. We use this by default on Unix platforms to ensure
-        // that our nightlies behave the same on Windows, that is they work out
-        // of the box. This can be disabled, of course, but basically that's why
-        // we're gated on RUSTC_RPATH here.
-        //
-        // Ok, so the astute might be wondering "why isn't `-C rpath` used
-        // here?" and that is indeed a good question to task. This codegen
-        // option is the compiler's current interface to generating an rpath.
-        // Unfortunately it doesn't quite suffice for us. The flag currently
-        // takes no value as an argument, so the compiler calculates what it
-        // should pass to the linker as `-rpath`. This unfortunately is based on
-        // the **compile time** directory structure which when building with
-        // Cargo will be very different than the runtime directory structure.
-        //
-        // All that's a really long winded way of saying that if we use
-        // `-Crpath` then the executables generated have the wrong rpath of
-        // something like `$ORIGIN/deps` when in fact the way we distribute
-        // rustc requires the rpath to be `$ORIGIN/../lib`.
-        //
-        // So, all in all, to set up the correct rpath we pass the linker
-        // argument manually via `-C link-args=-Wl,-rpath,...`. Plus isn't it
-        // fun to pass a flag to a tool to pass a flag to pass a flag to a tool
-        // to change a flag in a binary?
-        if env::var("RUSTC_RPATH") == Ok("true".to_string()) {
-            let rpath = if target.contains("apple") {
-                Some("-Wl,-rpath,@loader_path/../lib")
-            } else if !target.contains("windows") {
-                Some("-Wl,-rpath,$ORIGIN/../lib")
-            } else {
-                None
-            };
-            if let Some(rpath) = rpath {
-                cmd.arg("-C").arg(format!("link-args={}", rpath));
-            }
-        }
-    }
-
-    // Actually run the compiler!
-    std::process::exit(match cmd.status() {
-        Ok(s) => s.code().unwrap_or(1),
-        Err(e) => panic!("\n\nfailed to run {:?}: {}\n\n", cmd, e),
-    })
-}
diff --git a/src/bootstrap/rustdoc.rs b/src/bootstrap/rustdoc.rs
deleted file mode 100644 (file)
index 88ac26d..0000000
+++ /dev/null
@@ -1,39 +0,0 @@
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Shim which is passed to Cargo as "rustdoc" when running the bootstrap.
-//!
-//! See comments in `src/bootstrap/rustc.rs` for more information.
-
-extern crate bootstrap;
-
-use std::env;
-use std::process::Command;
-use std::path::PathBuf;
-
-fn main() {
-    let args = env::args_os().skip(1).collect::<Vec<_>>();
-    let rustdoc = env::var_os("RUSTDOC_REAL").unwrap();
-    let libdir = env::var_os("RUSTC_LIBDIR").unwrap();
-
-    let mut dylib_path = bootstrap::dylib_path();
-    dylib_path.insert(0, PathBuf::from(libdir));
-
-    let mut cmd = Command::new(rustdoc);
-    cmd.args(&args)
-       .arg("--cfg").arg(format!("stage{}", env::var("RUSTC_STAGE").unwrap()))
-       .arg("--cfg").arg("dox")
-       .env(bootstrap::dylib_path_var(), env::join_paths(&dylib_path).unwrap());
-    std::process::exit(match cmd.status() {
-        Ok(s) => s.code().unwrap_or(1),
-        Err(e) => panic!("\n\nfailed to run {:?}: {}\n\n", cmd, e),
-    })
-}
-
diff --git a/src/bootstrap/sanity.rs b/src/bootstrap/sanity.rs
new file mode 100644 (file)
index 0000000..7c0f09c
--- /dev/null
@@ -0,0 +1,172 @@
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Sanity checking performed by rustbuild before actually executing anything.
+//!
+//! This module contains the implementation of ensuring that the build
+//! environment looks reasonable before progressing. This will verify that
+//! various programs like git and python exist, along with ensuring that all C
+//! compilers for cross-compiling are found.
+//!
+//! In theory if we get past this phase it's a bug if a build fails, but in
+//! practice that's likely not true!
+
+use std::collections::HashSet;
+use std::env;
+use std::ffi::{OsStr, OsString};
+use std::fs;
+use std::process::Command;
+
+use build_helper::output;
+
+use Build;
+
+pub fn check(build: &mut Build) {
+    let mut checked = HashSet::new();
+    let path = env::var_os("PATH").unwrap_or(OsString::new());
+    let mut need_cmd = |cmd: &OsStr| {
+        if !checked.insert(cmd.to_owned()) {
+            return
+        }
+        for path in env::split_paths(&path).map(|p| p.join(cmd)) {
+            if fs::metadata(&path).is_ok() ||
+               fs::metadata(path.with_extension("exe")).is_ok() {
+                return
+            }
+        }
+        panic!("\n\ncouldn't find required command: {:?}\n\n", cmd);
+    };
+
+    // If we've got a git directory we're gonna need git to update
+    // submodules and learn about various other aspects.
+    if fs::metadata(build.src.join(".git")).is_ok() {
+        need_cmd("git".as_ref());
+    }
+
+    // We need cmake, but only if we're actually building LLVM
+    for host in build.config.host.iter() {
+        if let Some(config) = build.config.target_config.get(host) {
+            if config.llvm_config.is_some() {
+                continue
+            }
+        }
+        need_cmd("cmake".as_ref());
+        if build.config.ninja {
+            need_cmd("ninja".as_ref())
+        }
+        break
+    }
+
+    need_cmd("python".as_ref());
+
+    // We're gonna build some custom C code here and there, host triples
+    // also build some C++ shims for LLVM so we need a C++ compiler.
+    for target in build.config.target.iter() {
+        need_cmd(build.cc(target).as_ref());
+        if let Some(ar) = build.ar(target) {
+            need_cmd(ar.as_ref());
+        }
+    }
+    for host in build.config.host.iter() {
+        need_cmd(build.cxx(host).as_ref());
+    }
+
+    // Externally configured LLVM requires FileCheck to exist
+    let filecheck = build.llvm_filecheck(&build.config.build);
+    if !filecheck.starts_with(&build.out) && !filecheck.exists() {
+        panic!("filecheck executable {:?} does not exist", filecheck);
+    }
+
+    for target in build.config.target.iter() {
+        // Either can't build or don't want to run jemalloc on these targets
+        if target.contains("rumprun") ||
+           target.contains("bitrig") ||
+           target.contains("openbsd") ||
+           target.contains("msvc") {
+            build.config.use_jemalloc = false;
+        }
+
+        // Can't compile for iOS unless we're on OSX
+        if target.contains("apple-ios") &&
+           !build.config.build.contains("apple-darwin") {
+            panic!("the iOS target is only supported on OSX");
+        }
+
+        // Make sure musl-root is valid if specified
+        if target.contains("musl") && (target.contains("x86_64") || target.contains("i686")) {
+            match build.config.musl_root {
+                Some(ref root) => {
+                    if fs::metadata(root.join("lib/libc.a")).is_err() {
+                        panic!("couldn't find libc.a in musl dir: {}",
+                               root.join("lib").display());
+                    }
+                    if fs::metadata(root.join("lib/libunwind.a")).is_err() {
+                        panic!("couldn't find libunwind.a in musl dir: {}",
+                               root.join("lib").display());
+                    }
+                }
+                None => {
+                    panic!("when targeting MUSL the build.musl-root option \
+                            must be specified in config.toml")
+                }
+            }
+        }
+
+        if target.contains("msvc") {
+            // There are three builds of cmake on windows: MSVC, MinGW, and
+            // Cygwin. The Cygwin build does not have generators for Visual
+            // Studio, so detect that here and error.
+            let out = output(Command::new("cmake").arg("--help"));
+            if !out.contains("Visual Studio") {
+                panic!("
+cmake does not support Visual Studio generators.
+
+This is likely due to it being an msys/cygwin build of cmake,
+rather than the required windows version, built using MinGW
+or Visual Studio.
+
+If you are building under msys2 try installing the mingw-w64-x86_64-cmake
+package instead of cmake:
+
+$ pacman -R cmake && pacman -S mingw-w64-x86_64-cmake
+");
+            }
+        }
+
+        if target.contains("arm-linux-android") {
+            need_cmd("adb".as_ref());
+        }
+    }
+
+    for host in build.flags.host.iter() {
+        if !build.config.host.contains(host) {
+            panic!("specified host `{}` is not in the ./configure list", host);
+        }
+    }
+    for target in build.flags.target.iter() {
+        if !build.config.target.contains(target) {
+            panic!("specified target `{}` is not in the ./configure list",
+                   target);
+        }
+    }
+
+    let run = |cmd: &mut Command| {
+        cmd.output().map(|output| {
+            String::from_utf8_lossy(&output.stdout)
+                   .lines().next().unwrap()
+                   .to_string()
+        })
+    };
+    build.gdb_version = run(Command::new("gdb").arg("--version")).ok();
+    build.lldb_version = run(Command::new("lldb").arg("--version")).ok();
+    if build.lldb_version.is_some() {
+        build.lldb_python_dir = run(Command::new("lldb").arg("-P")).ok();
+    }
+}
diff --git a/src/bootstrap/step.rs b/src/bootstrap/step.rs
new file mode 100644 (file)
index 0000000..4b3be04
--- /dev/null
@@ -0,0 +1,590 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Major workhorse of rustbuild, definition and dependencies between stages of
+//! the compile.
+//!
+//! The primary purpose of this module is to define the various `Step`s of
+//! execution of the build. Each `Step` has a corresponding `Source` indicating
+//! what it's actually doing along with a number of dependencies which must be
+//! executed first.
+//!
+//! This module will take the CLI as input and calculate the steps required for
+//! the build requested, ensuring that all intermediate pieces are in place.
+//! Essentially this module is a `make`-replacement, but not as good.
+
+use std::collections::HashSet;
+
+use {Build, Compiler};
+
+#[derive(Hash, Eq, PartialEq, Clone, Debug)]
+pub struct Step<'a> {
+    pub src: Source<'a>,
+    pub target: &'a str,
+}
+
+/// Macro used to iterate over all targets that are recognized by the build
+/// system.
+///
+/// Whenever a new step is added it will involve adding an entry here, updating
+/// the dependencies section below, and then adding an implementation of the
+/// step in `build/mod.rs`.
+///
+/// This macro takes another macro as an argument and then calls that macro with
+/// all steps that the build system knows about.
+macro_rules! targets {
+    ($m:ident) => {
+        $m! {
+            // Step representing building the stageN compiler. This is just the
+            // compiler executable itself, not any of the support libraries
+            (rustc, Rustc { stage: u32 }),
+
+            // Steps for the two main cargo builds. These are parameterized over
+            // the compiler which is producing the artifact.
+            (libstd, Libstd { compiler: Compiler<'a> }),
+            (libtest, Libtest { compiler: Compiler<'a> }),
+            (librustc, Librustc { compiler: Compiler<'a> }),
+
+            // Links the target produced by the compiler provided into the
+            // host's directory also provided.
+            (libstd_link, LibstdLink {
+                compiler: Compiler<'a>,
+                host: &'a str
+            }),
+            (libtest_link, LibtestLink {
+                compiler: Compiler<'a>,
+                host: &'a str
+            }),
+            (librustc_link, LibrustcLink {
+                compiler: Compiler<'a>,
+                host: &'a str
+            }),
+
+            // Various tools that we can build as part of the build.
+            (tool_linkchecker, ToolLinkchecker { stage: u32 }),
+            (tool_rustbook, ToolRustbook { stage: u32 }),
+            (tool_error_index, ToolErrorIndex { stage: u32 }),
+            (tool_cargotest, ToolCargoTest { stage: u32 }),
+            (tool_tidy, ToolTidy { stage: u32 }),
+            (tool_compiletest, ToolCompiletest { stage: u32 }),
+
+            // Steps for long-running native builds. Ideally these wouldn't
+            // actually exist and would be part of build scripts, but for now
+            // these are here.
+            //
+            // There aren't really any parameters to this, but empty structs
+            // with braces are unstable so we just pick something that works.
+            (llvm, Llvm { _dummy: () }),
+            (compiler_rt, CompilerRt { _dummy: () }),
+            (test_helpers, TestHelpers { _dummy: () }),
+            (debugger_scripts, DebuggerScripts { stage: u32 }),
+
+            // Steps for various pieces of documentation that we can generate,
+            // the 'doc' step is just a pseudo target to depend on a bunch of
+            // others.
+            (doc, Doc { stage: u32 }),
+            (doc_book, DocBook { stage: u32 }),
+            (doc_nomicon, DocNomicon { stage: u32 }),
+            (doc_style, DocStyle { stage: u32 }),
+            (doc_standalone, DocStandalone { stage: u32 }),
+            (doc_std, DocStd { stage: u32 }),
+            (doc_test, DocTest { stage: u32 }),
+            (doc_rustc, DocRustc { stage: u32 }),
+            (doc_error_index, DocErrorIndex { stage: u32 }),
+
+            // Steps for running tests. The 'check' target is just a pseudo
+            // target to depend on a bunch of others.
+            (check, Check { stage: u32, compiler: Compiler<'a> }),
+            (check_target, CheckTarget { stage: u32, compiler: Compiler<'a> }),
+            (check_linkcheck, CheckLinkcheck { stage: u32 }),
+            (check_cargotest, CheckCargoTest { stage: u32 }),
+            (check_tidy, CheckTidy { stage: u32 }),
+            (check_rpass, CheckRPass { compiler: Compiler<'a> }),
+            (check_rpass_full, CheckRPassFull { compiler: Compiler<'a> }),
+            (check_rpass_valgrind, CheckRPassValgrind { compiler: Compiler<'a> }),
+            (check_rfail, CheckRFail { compiler: Compiler<'a> }),
+            (check_rfail_full, CheckRFailFull { compiler: Compiler<'a> }),
+            (check_cfail, CheckCFail { compiler: Compiler<'a> }),
+            (check_cfail_full, CheckCFailFull { compiler: Compiler<'a> }),
+            (check_pfail, CheckPFail { compiler: Compiler<'a> }),
+            (check_pretty, CheckPretty { compiler: Compiler<'a> }),
+            (check_pretty_rpass, CheckPrettyRPass { compiler: Compiler<'a> }),
+            (check_pretty_rpass_full, CheckPrettyRPassFull { compiler: Compiler<'a> }),
+            (check_pretty_rfail, CheckPrettyRFail { compiler: Compiler<'a> }),
+            (check_pretty_rfail_full, CheckPrettyRFailFull { compiler: Compiler<'a> }),
+            (check_pretty_rpass_valgrind, CheckPrettyRPassValgrind { compiler: Compiler<'a> }),
+            (check_codegen, CheckCodegen { compiler: Compiler<'a> }),
+            (check_codegen_units, CheckCodegenUnits { compiler: Compiler<'a> }),
+            (check_incremental, CheckIncremental { compiler: Compiler<'a> }),
+            (check_ui, CheckUi { compiler: Compiler<'a> }),
+            (check_debuginfo, CheckDebuginfo { compiler: Compiler<'a> }),
+            (check_rustdoc, CheckRustdoc { compiler: Compiler<'a> }),
+            (check_docs, CheckDocs { compiler: Compiler<'a> }),
+            (check_error_index, CheckErrorIndex { compiler: Compiler<'a> }),
+            (check_rmake, CheckRMake { compiler: Compiler<'a> }),
+            (check_crate_std, CheckCrateStd { compiler: Compiler<'a> }),
+            (check_crate_test, CheckCrateTest { compiler: Compiler<'a> }),
+            (check_crate_rustc, CheckCrateRustc { compiler: Compiler<'a> }),
+
+            // Distribution targets, creating tarballs
+            (dist, Dist { stage: u32 }),
+            (dist_docs, DistDocs { stage: u32 }),
+            (dist_mingw, DistMingw { _dummy: () }),
+            (dist_rustc, DistRustc { stage: u32 }),
+            (dist_std, DistStd { compiler: Compiler<'a> }),
+
+            // Misc targets
+            (android_copy_libs, AndroidCopyLibs { compiler: Compiler<'a> }),
+        }
+    }
+}
+
+// Define the `Source` enum by iterating over all the steps and peeling out just
+// the types that we want to define.
+
+macro_rules! item { ($a:item) => ($a) }
+
+macro_rules! define_source {
+    ($(($short:ident, $name:ident { $($args:tt)* }),)*) => {
+        item! {
+            #[derive(Hash, Eq, PartialEq, Clone, Debug)]
+            pub enum Source<'a> {
+                $($name { $($args)* }),*
+            }
+        }
+    }
+}
+
+targets!(define_source);
+
+/// Calculate a list of all steps described by `build`.
+///
+/// This will inspect the flags passed in on the command line and use that to
+/// build up a list of steps to execute. These steps will then be transformed
+/// into a topologically sorted list which when executed left-to-right will
+/// correctly sequence the entire build.
+pub fn all(build: &Build) -> Vec<Step> {
+    let mut ret = Vec::new();
+    let mut all = HashSet::new();
+    for target in top_level(build) {
+        fill(build, &target, &mut ret, &mut all);
+    }
+    return ret;
+
+    fn fill<'a>(build: &'a Build,
+                target: &Step<'a>,
+                ret: &mut Vec<Step<'a>>,
+                set: &mut HashSet<Step<'a>>) {
+        if set.insert(target.clone()) {
+            for dep in target.deps(build) {
+                fill(build, &dep, ret, set);
+            }
+            ret.push(target.clone());
+        }
+    }
+}
+
+/// Determines what top-level targets are requested as part of this build,
+/// returning them as a list.
+fn top_level(build: &Build) -> Vec<Step> {
+    let mut targets = Vec::new();
+    let stage = build.flags.stage.unwrap_or(2);
+
+    let host = Step {
+        src: Source::Llvm { _dummy: () },
+        target: build.flags.host.iter().next()
+                     .unwrap_or(&build.config.build),
+    };
+    let target = Step {
+        src: Source::Llvm { _dummy: () },
+        target: build.flags.target.iter().next().map(|x| &x[..])
+                     .unwrap_or(host.target)
+    };
+
+    // First, try to find steps on the command line.
+    add_steps(build, stage, &host, &target, &mut targets);
+
+    // If none are specified, then build everything.
+    if targets.len() == 0 {
+        let t = Step {
+            src: Source::Llvm { _dummy: () },
+            target: &build.config.build,
+        };
+        if build.config.docs {
+          targets.push(t.doc(stage));
+        }
+        for host in build.config.host.iter() {
+            if !build.flags.host.contains(host) {
+                continue
+            }
+            let host = t.target(host);
+            if host.target == build.config.build {
+                targets.push(host.librustc(host.compiler(stage)));
+            } else {
+                targets.push(host.librustc_link(t.compiler(stage), host.target));
+            }
+            for target in build.config.target.iter() {
+                if !build.flags.target.contains(target) {
+                    continue
+                }
+
+                if host.target == build.config.build {
+                    targets.push(host.target(target)
+                                     .libtest(host.compiler(stage)));
+                } else {
+                    targets.push(host.target(target)
+                                     .libtest_link(t.compiler(stage), host.target));
+                }
+            }
+        }
+    }
+
+    return targets
+
+}
+
+fn add_steps<'a>(build: &'a Build,
+                 stage: u32,
+                 host: &Step<'a>,
+                 target: &Step<'a>,
+                 targets: &mut Vec<Step<'a>>) {
+    struct Context<'a> {
+        stage: u32,
+        compiler: Compiler<'a>,
+        _dummy: (),
+        host: &'a str,
+    }
+    for step in build.flags.step.iter() {
+
+        // The macro below insists on hygienic access to all local variables, so
+        // we shove them all in a struct and subvert hygiene by accessing struct
+        // fields instead.
+        let cx = Context {
+            stage: stage,
+            compiler: host.target(&build.config.build).compiler(stage),
+            _dummy: (),
+            host: host.target,
+        };
+        macro_rules! add_step {
+            ($(($short:ident, $name:ident { $($arg:ident: $t:ty),* }),)*) => ({$(
+                let name = stringify!($short).replace("_", "-");
+                if &step[..] == &name[..] {
+                    targets.push(target.$short($(cx.$arg),*));
+                    continue
+                }
+                drop(name);
+            )*})
+        }
+
+        targets!(add_step);
+
+        panic!("unknown step: {}", step);
+    }
+}
+
+macro_rules! constructors {
+    ($(($short:ident, $name:ident { $($arg:ident: $t:ty),* }),)*) => {$(
+        fn $short(&self, $($arg: $t),*) -> Step<'a> {
+            Step {
+                src: Source::$name { $($arg: $arg),* },
+                target: self.target,
+            }
+        }
+    )*}
+}
+
+impl<'a> Step<'a> {
+    fn compiler(&self, stage: u32) -> Compiler<'a> {
+        Compiler::new(stage, self.target)
+    }
+
+    fn target(&self, target: &'a str) -> Step<'a> {
+        Step { target: target, src: self.src.clone() }
+    }
+
+    // Define ergonomic constructors for each step defined above so they can be
+    // easily constructed.
+    targets!(constructors);
+
+    /// Mapping of all dependencies for rustbuild.
+    ///
+    /// This function receives a step, the build that we're building for, and
+    /// then returns a list of all the dependencies of that step.
+    pub fn deps(&self, build: &'a Build) -> Vec<Step<'a>> {
+        match self.src {
+            Source::Rustc { stage: 0 } => {
+                Vec::new()
+            }
+            Source::Rustc { stage } => {
+                let compiler = Compiler::new(stage - 1, &build.config.build);
+                vec![self.librustc(compiler)]
+            }
+            Source::Librustc { compiler } => {
+                vec![self.libtest(compiler), self.llvm(())]
+            }
+            Source::Libtest { compiler } => {
+                vec![self.libstd(compiler)]
+            }
+            Source::Libstd { compiler } => {
+                vec![self.compiler_rt(()),
+                     self.rustc(compiler.stage).target(compiler.host)]
+            }
+            Source::LibrustcLink { compiler, host } => {
+                vec![self.librustc(compiler),
+                     self.libtest_link(compiler, host)]
+            }
+            Source::LibtestLink { compiler, host } => {
+                vec![self.libtest(compiler), self.libstd_link(compiler, host)]
+            }
+            Source::LibstdLink { compiler, host } => {
+                vec![self.libstd(compiler),
+                     self.target(host).rustc(compiler.stage)]
+            }
+            Source::CompilerRt { _dummy } => {
+                vec![self.llvm(()).target(&build.config.build)]
+            }
+            Source::Llvm { _dummy } => Vec::new(),
+            Source::TestHelpers { _dummy } => Vec::new(),
+            Source::DebuggerScripts { stage: _ } => Vec::new(),
+
+            // Note that all doc targets depend on artifacts from the build
+            // architecture, not the target (which is where we're generating
+            // docs into).
+            Source::DocStd { stage } => {
+                let compiler = self.target(&build.config.build).compiler(stage);
+                vec![self.libstd(compiler)]
+            }
+            Source::DocTest { stage } => {
+                let compiler = self.target(&build.config.build).compiler(stage);
+                vec![self.libtest(compiler)]
+            }
+            Source::DocBook { stage } |
+            Source::DocNomicon { stage } |
+            Source::DocStyle { stage } => {
+                vec![self.target(&build.config.build).tool_rustbook(stage)]
+            }
+            Source::DocErrorIndex { stage } => {
+                vec![self.target(&build.config.build).tool_error_index(stage)]
+            }
+            Source::DocStandalone { stage } => {
+                vec![self.target(&build.config.build).rustc(stage)]
+            }
+            Source::DocRustc { stage } => {
+                vec![self.doc_test(stage)]
+            }
+            Source::Doc { stage } => {
+                vec![self.doc_book(stage), self.doc_nomicon(stage),
+                     self.doc_style(stage), self.doc_standalone(stage),
+                     self.doc_std(stage),
+                     self.doc_error_index(stage)]
+            }
+            Source::Check { stage, compiler } => {
+                // Check is just a pseudo step which means check all targets,
+                // so just depend on checking all targets.
+                build.config.target.iter().map(|t| {
+                    self.target(t).check_target(stage, compiler)
+                }).collect()
+            }
+            Source::CheckTarget { stage, compiler } => {
+                // CheckTarget here means run all possible test suites for this
+                // target. Most of the time, however, we can't actually run
+                // anything if we're not the build triple as we could be cross
+                // compiling.
+                //
+                // As a result, the base set of targets here is quite stripped
+                // down from the standard set of targets. These suites have
+                // their own internal logic to run in cross-compiled situations
+                // if they'll run at all. For example compiletest knows that
+                // when testing Android targets we ship artifacts to the
+                // emulator.
+                //
+                // When in doubt the rule of thumb for adding to this list is
+                // "should this test suite run on the android bot?"
+                let mut base = vec![
+                    self.check_rpass(compiler),
+                    self.check_rfail(compiler),
+                    self.check_crate_std(compiler),
+                    self.check_crate_test(compiler),
+                    self.check_debuginfo(compiler),
+                    self.dist(stage),
+                ];
+
+                // If we're testing the build triple, then we know we can
+                // actually run binaries and such, so we run all possible tests
+                // that we know about.
+                if self.target == build.config.build {
+                    base.extend(vec![
+                        // docs-related
+                        self.check_docs(compiler),
+                        self.check_error_index(compiler),
+                        self.check_rustdoc(compiler),
+
+                        // UI-related
+                        self.check_cfail(compiler),
+                        self.check_pfail(compiler),
+                        self.check_ui(compiler),
+
+                        // codegen-related
+                        self.check_incremental(compiler),
+                        self.check_codegen(compiler),
+                        self.check_codegen_units(compiler),
+
+                        // misc compiletest-test suites
+                        self.check_rpass_full(compiler),
+                        self.check_rfail_full(compiler),
+                        self.check_cfail_full(compiler),
+                        self.check_pretty_rpass_full(compiler),
+                        self.check_pretty_rfail_full(compiler),
+                        self.check_rpass_valgrind(compiler),
+                        self.check_rmake(compiler),
+
+                        // crates
+                        self.check_crate_rustc(compiler),
+
+                        // pretty
+                        self.check_pretty(compiler),
+                        self.check_pretty_rpass(compiler),
+                        self.check_pretty_rfail(compiler),
+                        self.check_pretty_rpass_valgrind(compiler),
+
+                        // misc
+                        self.check_linkcheck(stage),
+                        self.check_tidy(stage),
+                    ]);
+                }
+                return base
+            }
+            Source::CheckLinkcheck { stage } => {
+                vec![self.tool_linkchecker(stage), self.doc(stage)]
+            }
+            Source::CheckCargoTest { stage } => {
+                vec![self.tool_cargotest(stage),
+                     self.librustc(self.compiler(stage))]
+            }
+            Source::CheckTidy { stage } => {
+                vec![self.tool_tidy(stage)]
+            }
+            Source::CheckPrettyRPass { compiler } |
+            Source::CheckPrettyRFail { compiler } |
+            Source::CheckRFail { compiler } |
+            Source::CheckPFail { compiler } |
+            Source::CheckCodegen { compiler } |
+            Source::CheckCodegenUnits { compiler } |
+            Source::CheckIncremental { compiler } |
+            Source::CheckUi { compiler } |
+            Source::CheckRustdoc { compiler } |
+            Source::CheckPretty { compiler } |
+            Source::CheckCFail { compiler } |
+            Source::CheckRPassValgrind { compiler } |
+            Source::CheckRPass { compiler } => {
+                let mut base = vec![
+                    self.libtest(compiler),
+                    self.target(compiler.host).tool_compiletest(compiler.stage),
+                    self.test_helpers(()),
+                ];
+                if self.target.contains("android") {
+                    base.push(self.android_copy_libs(compiler));
+                }
+                base
+            }
+            Source::CheckDebuginfo { compiler } => {
+                vec![
+                    self.libtest(compiler),
+                    self.target(compiler.host).tool_compiletest(compiler.stage),
+                    self.test_helpers(()),
+                    self.debugger_scripts(compiler.stage),
+                ]
+            }
+            Source::CheckRPassFull { compiler } |
+            Source::CheckRFailFull { compiler } |
+            Source::CheckCFailFull { compiler } |
+            Source::CheckPrettyRPassFull { compiler } |
+            Source::CheckPrettyRFailFull { compiler } |
+            Source::CheckPrettyRPassValgrind { compiler } |
+            Source::CheckRMake { compiler } => {
+                vec![self.librustc(compiler),
+                     self.target(compiler.host).tool_compiletest(compiler.stage)]
+            }
+            Source::CheckDocs { compiler } => {
+                vec![self.libstd(compiler)]
+            }
+            Source::CheckErrorIndex { compiler } => {
+                vec![self.libstd(compiler),
+                     self.target(compiler.host).tool_error_index(compiler.stage)]
+            }
+            Source::CheckCrateStd { compiler } => {
+                vec![self.libtest(compiler)]
+            }
+            Source::CheckCrateTest { compiler } => {
+                vec![self.libtest(compiler)]
+            }
+            Source::CheckCrateRustc { compiler } => {
+                vec![self.libtest(compiler)]
+            }
+
+            Source::ToolLinkchecker { stage } |
+            Source::ToolTidy { stage } => {
+                vec![self.libstd(self.compiler(stage))]
+            }
+            Source::ToolErrorIndex { stage } |
+            Source::ToolRustbook { stage } => {
+                vec![self.librustc(self.compiler(stage))]
+            }
+            Source::ToolCargoTest { stage } => {
+                vec![self.libstd(self.compiler(stage))]
+            }
+            Source::ToolCompiletest { stage } => {
+                vec![self.libtest(self.compiler(stage))]
+            }
+
+            Source::DistDocs { stage } => vec![self.doc(stage)],
+            Source::DistMingw { _dummy: _ } => Vec::new(),
+            Source::DistRustc { stage } => {
+                vec![self.rustc(stage)]
+            }
+            Source::DistStd { compiler } => {
+                // We want to package up as many target libraries as possible
+                // for the `rust-std` package, so if this is a host target we
+                // depend on librustc and otherwise we just depend on libtest.
+                if build.config.host.iter().any(|t| t == self.target) {
+                    vec![self.librustc(compiler)]
+                } else {
+                    vec![self.libtest(compiler)]
+                }
+            }
+
+            Source::Dist { stage } => {
+                let mut base = Vec::new();
+
+                for host in build.config.host.iter() {
+                    let host = self.target(host);
+                    base.push(host.dist_rustc(stage));
+                    if host.target.contains("windows-gnu") {
+                        base.push(host.dist_mingw(()));
+                    }
+
+                    let compiler = self.compiler(stage);
+                    for target in build.config.target.iter() {
+                        let target = self.target(target);
+                        if build.config.docs {
+                            base.push(target.dist_docs(stage));
+                        }
+                        base.push(target.dist_std(compiler));
+                    }
+                }
+                return base
+            }
+
+            Source::AndroidCopyLibs { compiler } => {
+                vec![self.libtest(compiler)]
+            }
+        }
+    }
+}
diff --git a/src/bootstrap/util.rs b/src/bootstrap/util.rs
new file mode 100644 (file)
index 0000000..3ef7f8c
--- /dev/null
@@ -0,0 +1,142 @@
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Various utility functions used throughout rustbuild.
+//!
+//! Simple things like testing the various filesystem operations here and there,
+//! not a lot of interesting happenings here unfortunately.
+
+use std::env;
+use std::ffi::OsString;
+use std::fs;
+use std::path::{Path, PathBuf};
+use std::process::Command;
+
+use filetime::FileTime;
+
+/// Returns the `name` as the filename of a static library for `target`.
+pub fn staticlib(name: &str, target: &str) -> String {
+    if target.contains("windows-msvc") {
+        format!("{}.lib", name)
+    } else {
+        format!("lib{}.a", name)
+    }
+}
+
+/// Returns the last-modified time for `path`, or zero if it doesn't exist.
+pub fn mtime(path: &Path) -> FileTime {
+    fs::metadata(path).map(|f| {
+        FileTime::from_last_modification_time(&f)
+    }).unwrap_or(FileTime::zero())
+}
+
+/// Copies a file from `src` to `dst`, attempting to use hard links and then
+/// falling back to an actual filesystem copy if necessary.
+pub fn copy(src: &Path, dst: &Path) {
+    let res = fs::hard_link(src, dst);
+    let res = res.or_else(|_| fs::copy(src, dst).map(|_| ()));
+    if let Err(e) = res {
+        panic!("failed to copy `{}` to `{}`: {}", src.display(),
+               dst.display(), e)
+    }
+}
+
+/// Copies the `src` directory recursively to `dst`. Both are assumed to exist
+/// when this function is called.
+pub fn cp_r(src: &Path, dst: &Path) {
+    for f in t!(fs::read_dir(src)) {
+        let f = t!(f);
+        let path = f.path();
+        let name = path.file_name().unwrap();
+        let dst = dst.join(name);
+        if t!(f.file_type()).is_dir() {
+            let _ = fs::remove_dir_all(&dst);
+            t!(fs::create_dir(&dst));
+            cp_r(&path, &dst);
+        } else {
+            let _ = fs::remove_file(&dst);
+            copy(&path, &dst);
+        }
+    }
+}
+
+/// Given an executable called `name`, return the filename for the
+/// executable for a particular target.
+pub fn exe(name: &str, target: &str) -> String {
+    if target.contains("windows") {
+        format!("{}.exe", name)
+    } else {
+        name.to_string()
+    }
+}
+
+/// Returns whether the file name given looks like a dynamic library.
+pub fn is_dylib(name: &str) -> bool {
+    name.ends_with(".dylib") || name.ends_with(".so") || name.ends_with(".dll")
+}
+
+/// Returns the corresponding relative library directory that the compiler's
+/// dylibs will be found in.
+pub fn libdir(target: &str) -> &'static str {
+    if target.contains("windows") {"bin"} else {"lib"}
+}
+
+/// Adds a list of lookup paths to `cmd`'s dynamic library lookup path.
+pub fn add_lib_path(path: Vec<PathBuf>, cmd: &mut Command) {
+    let mut list = dylib_path();
+    for path in path {
+        list.insert(0, path);
+    }
+    cmd.env(dylib_path_var(), t!(env::join_paths(list)));
+}
+
+/// Returns whether `dst` is up to date given that the file or files in `src`
+/// are used to generate it.
+///
+/// Uses last-modified time checks to verify this.
+pub fn up_to_date(src: &Path, dst: &Path) -> bool {
+    let threshold = mtime(dst);
+    let meta = t!(fs::metadata(src));
+    if meta.is_dir() {
+        dir_up_to_date(src, &threshold)
+    } else {
+        FileTime::from_last_modification_time(&meta) <= threshold
+    }
+}
+
+fn dir_up_to_date(src: &Path, threshold: &FileTime) -> bool {
+    t!(fs::read_dir(src)).map(|e| t!(e)).all(|e| {
+        let meta = t!(e.metadata());
+        if meta.is_dir() {
+            dir_up_to_date(&e.path(), threshold)
+        } else {
+            FileTime::from_last_modification_time(&meta) < *threshold
+        }
+    })
+}
+
+/// Returns the environment variable which the dynamic library lookup path
+/// resides in for this platform.
+pub fn dylib_path_var() -> &'static str {
+    if cfg!(target_os = "windows") {
+        "PATH"
+    } else if cfg!(target_os = "macos") {
+        "DYLD_LIBRARY_PATH"
+    } else {
+        "LD_LIBRARY_PATH"
+    }
+}
+
+/// Parses the `dylib_path_var()` environment variable, returning a list of
+/// paths that are members of this lookup path.
+pub fn dylib_path() -> Vec<PathBuf> {
+    env::split_paths(&env::var_os(dylib_path_var()).unwrap_or(OsString::new()))
+        .collect()
+}
index a6b4e9492181c790fe5a3c040ca6e02397824d8f..e8c88b7db0699d1ebe03b1827c29c4607aac3dd2 100644 (file)
@@ -339,7 +339,7 @@ fn call_with_ref<'a, F>(some_closure:F) -> i32
     where F: Fn(&'a 32) -> i32 {
 ```
 
-However this presents a problem with in our case. When you specify the explict
+However, this presents a problem in our case. When you specify the explicit
 lifetime on a function it binds that lifetime to the *entire* scope of the function
 instead of just the invocation scope of our closure. This means that the borrow checker
 will see a mutable reference in the same lifetime as our immutable reference and fail
@@ -354,7 +354,7 @@ fn call_with_ref<F>(some_closure:F) -> i32
 ```
 
 This lets the Rust compiler find the minimum lifetime to invoke our closure and
-satisfy the borrow checker's rules. Our function then compiles and excutes as we
+satisfy the borrow checker's rules. Our function then compiles and executes as we
 expect.
 
 ```rust
index a6ff75db89b88ddccbefe9f1b315df47d9e14240..78ab3c18e4561988429bdb55ffbec9da21eb8caf 100644 (file)
@@ -41,8 +41,9 @@ they get set in the [`[features]` section][features] of your `Cargo.toml`:
 # no features by default
 default = []
 
-# The “secure-password” feature depends on the bcrypt package.
-secure-password = ["bcrypt"]
+# Add feature "foo" here, then you can use it. 
+# Our "foo" feature depends on nothing else.
+foo = []
 ```
 
 When you do this, Cargo passes along a flag to `rustc`:
index 3c6643fbfe1554e0ae02c5bc551f0a04353715c2..6292ba9aac40317c41e590bbc58d549e2f6df179 100644 (file)
@@ -486,6 +486,17 @@ you have a module in `foo.rs`, you'll often open its code and see this:
 //! The `foo` module contains a lot of useful functionality blah blah blah
 ```
 
+### Crate documentation
+
+Crates can be documented by placing an inner doc comment (`//!`) at the
+beginning of the crate root, aka `lib.rs`:
+
+```rust
+//! This is documentation for the `foo` crate.
+//!
+//! The foo crate is meant to be used for bar.
+```
+
 ### Documentation comment style
 
 Check out [RFC 505][rfc505] for full conventions around the style and format of
index e7d05a8d93a561eccb11f340ced8c8caac0b43af..700ab2be589326f5b30521f66cbc94705ae5c341 100644 (file)
@@ -11,7 +11,7 @@ an Internet connection to run the commands in this section, as we’ll be
 downloading Rust from the Internet.
 
 We’ll be showing off a number of commands using a terminal, and those lines all
-start with `$`. We don't need to type in the `$`s, they are there to indicate
+start with `$`. You don't need to type in the `$`s, they are there to indicate
 the start of each command. We’ll see many tutorials and examples around the web
 that follow this convention: `$` for commands run as our regular user, and `#`
 for commands we should be running as an administrator.
@@ -159,9 +159,11 @@ You should see the version number, commit hash, and commit date.
 If you do, Rust has been installed successfully! Congrats!
 
 If you don't and you're on Windows, check that Rust is in your %PATH% system
-variable. If it isn't, run the installer again, select "Change" on the "Change,
-repair, or remove installation" page and ensure "Add to PATH" is installed on
-the local hard drive.
+variable: `$ echo %PATH%`. If it isn't, run the installer again, select "Change"
+on the "Change, repair, or remove installation" page and ensure "Add to PATH" is
+installed on the local hard drive.  If you need to configure your path manually,
+you can find the Rust executables in a directory like
+`"C:\Program Files\Rust stable GNU 1.x\bin"`.
 
 Rust does not do its own linking, and so you’ll need to have a linker
 installed. Doing so will depend on your specific system, consult its
@@ -339,7 +341,8 @@ On Windows, you'd enter:
 
 ```bash
 $ dir
-main.exe  main.rs
+main.exe
+main.rs
 ```
 
 This shows we have two files: the source code, with an `.rs` extension, and the
@@ -347,7 +350,7 @@ executable (`main.exe` on Windows, `main` everywhere else). All that's left to
 do from here is run the `main` or `main.exe` file, like this:
 
 ```bash
-$ ./main  # or main.exe on Windows
+$ ./main  # or .\main.exe on Windows
 ```
 
 If *main.rs* were your "Hello, world!" program, this would print `Hello,
index c759ff9bdbde48e845a64dfbc158f198bd8178ff..6ce75efd1031d83ce7d372081090542a6091bf9b 100644 (file)
@@ -370,7 +370,7 @@ We could also use a range of versions.
 [Cargo’s documentation][cargodoc] contains more details.
 
 [semver]: http://semver.org
-[cargodoc]: http://doc.crates.io/crates-io.html
+[cargodoc]: http://doc.crates.io/specifying-dependencies.html
 
 Now, without changing any of our code, let’s build our project:
 
index 2c2d89a1fbf9ed3b46c881a19c0108fde7fd2217..a8340d9d31e79d2fe319794ef49f90da988b7721 100644 (file)
@@ -57,7 +57,7 @@ but you must add the right number of `:` if you skip them:
 asm!("xor %eax, %eax"
     :
     :
-    : "{eax}"
+    : "eax"
    );
 # } }
 ```
@@ -68,7 +68,7 @@ Whitespace also doesn't matter:
 # #![feature(asm)]
 # #[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
 # fn main() { unsafe {
-asm!("xor %eax, %eax" ::: "{eax}");
+asm!("xor %eax, %eax" ::: "eax");
 # } }
 ```
 
@@ -127,7 +127,7 @@ stay valid.
 # #[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
 # fn main() { unsafe {
 // Put the value 0x200 in eax
-asm!("mov $$0x200, %eax" : /* no outputs */ : /* no inputs */ : "{eax}");
+asm!("mov $$0x200, %eax" : /* no outputs */ : /* no inputs */ : "eax");
 # } }
 ```
 
index e23e6f3a786a5c0e816edb6f27370f3aaf20caf7..e681d1bee06184549861f1045c1b765a1c2a0f7e 100644 (file)
@@ -105,7 +105,7 @@ When you need to keep track of how many times you already looped, you can use th
 #### On ranges:
 
 ```rust
-for (i,j) in (5..10).enumerate() {
+for (i, j) in (5..10).enumerate() {
     println!("i = {} and j = {}", i, j);
 }
 ```
index e46271511462449930ee4859ae7bbd3505d34a36..a0a49d55e105740572194aedfd870f25534b1a50 100644 (file)
@@ -62,8 +62,8 @@ Note that here, the `x` is mutable, but not the `y`.
 # Interior vs. Exterior Mutability
 
 However, when we say something is ‘immutable’ in Rust, that doesn’t mean that
-it’s not able to be changed: we mean something has ‘exterior mutability’. Consider,
-for example, [`Arc<T>`][arc]:
+it’s not able to be changed: we are referring to its ‘exterior mutability’ that
+in this case is immutable. Consider, for example, [`Arc<T>`][arc]:
 
 ```rust
 use std::sync::Arc;
index b2fddf336273fe0d1f3d663e7bd09886579e41fb..328db25b819d89d236af7e8c2d8ea82ce63a5e1b 100644 (file)
@@ -163,11 +163,51 @@ struct Point(i32, i32, i32);
 let black = Color(0, 0, 0);
 let origin = Point(0, 0, 0);
 ```
-Here, `black` and `origin` are not equal, even though they contain the same
-values.
 
-It is almost always better to use a `struct` than a tuple struct. We
-would write `Color` and `Point` like this instead:
+Here, `black` and `origin` are not the same type, even though they contain the
+same values.
+
+The members of a tuple struct may be accessed by dot notation or destructuring
+`let`, just like regular tuples:
+
+```rust
+# struct Color(i32, i32, i32);
+# struct Point(i32, i32, i32);
+# let black = Color(0, 0, 0);
+# let origin = Point(0, 0, 0);
+let black_r = black.0;
+let Point(_, origin_y, origin_z) = origin;
+```
+
+Patterns like `Point(_, origin_y, origin_z)` are also used in
+[match expressions][match].
+
+One case when a tuple struct is very useful is when it has only one element.
+We call this the ‘newtype’ pattern, because it allows you to create a new type
+that is distinct from its contained value and also expresses its own semantic
+meaning:
+
+```rust
+struct Inches(i32);
+
+let length = Inches(10);
+
+let Inches(integer_length) = length;
+println!("length is {} inches", integer_length);
+```
+
+As above, you can extract the inner integer type through a destructuring `let`.
+In this case, the `let Inches(integer_length)` assigns `10` to `integer_length`.
+We could have used dot notation to do the same thing:
+
+```rust
+# struct Inches(i32);
+# let length = Inches(10);
+let integer_length = length.0;
+```
+
+It's always possible to use a `struct` instead of a tuple struct, and it can be
+clearer. We could write `Color` and `Point` like this instead:
 
 ```rust
 struct Color {
@@ -187,32 +227,19 @@ Good names are important, and while values in a tuple struct can be
 referenced with dot notation as well, a `struct` gives us actual names,
 rather than positions.
 
-There _is_ one case when a tuple struct is very useful, though, and that is when
-it has only one element. We call this the ‘newtype’ pattern, because
-it allows you to create a new type that is distinct from its contained value
-and also expresses its own semantic meaning:
-
-```rust
-struct Inches(i32);
-
-let length = Inches(10);
-
-let Inches(integer_length) = length;
-println!("length is {} inches", integer_length);
-```
-
-As you can see here, you can extract the inner integer type through a
-destructuring `let`, as with regular tuples. In this case, the
-`let Inches(integer_length)` assigns `10` to `integer_length`.
+[match]: match.html
 
 # Unit-like structs
 
 You can define a `struct` with no members at all:
 
 ```rust
-struct Electron;
+struct Electron {} // use empty braces...
+struct Proton;     // ...or just a semicolon
 
-let x = Electron;
+// whether you declared the struct with braces or not, do the same when creating one
+let x = Electron {};
+let y = Proton;
 ```
 
 Such a `struct` is called ‘unit-like’ because it resembles the empty
index 7954085472e503dc64eceb946a71b89a11e80ab6..86729147ed0652befde62ee0834f12ec2c095d1c 100644 (file)
@@ -431,7 +431,7 @@ one.
 
 Cargo will ignore files in subdirectories of the `tests/` directory.
 Therefore shared modules in integration tests are possible.
-For example `tests/common/mod.rs` is not seperatly compiled by cargo but can 
+For example `tests/common/mod.rs` is not separately compiled by cargo but can
 be imported in every test with `mod common;`
 
 That's all there is to the `tests` directory. The `tests` module isn't needed
index 33f22e8579664815349f342aa78794639de159e1..554ab66bc563d85328bd8c8303bcf035ac8fb380 100755 (executable)
 
 import gdb
 import re
+import sys
 import debugger_pretty_printers_common as rustpp
 
+# We want a version of `range` which doesn't allocate an intermediate list,
+# specifically it should use a lazy iterator. In Python 2 this was `xrange`, but
+# if we're running with Python 3 then we need to use `range` instead.
+if sys.version_info.major >= 3:
+    xrange = range
+
 #===============================================================================
 # GDB Pretty Printing Module for Rust
 #===============================================================================
@@ -215,7 +222,7 @@ class RustSlicePrinter:
         assert data_ptr.type.get_dwarf_type_kind() == rustpp.DWARF_TYPE_CODE_PTR
         raw_ptr = data_ptr.get_wrapped_value()
 
-        for index in range(0, length):
+        for index in xrange(0, length):
             yield (str(index), (raw_ptr + index).dereference())
 
 
@@ -244,7 +251,7 @@ class RustStdVecPrinter:
     def children(self):
         (length, data_ptr, cap) = rustpp.extract_length_ptr_and_cap_from_std_vec(self.__val)
         gdb_ptr = data_ptr.get_wrapped_value()
-        for index in range(0, length):
+        for index in xrange(0, length):
             yield (str(index), (gdb_ptr + index).dereference())
 
 
index aab1c0a0e0b39825b16673128729ef46310a5da8..e058ca661692a8d01f8cf9d35939dfe3105ce968 160000 (submodule)
@@ -1 +1 @@
-Subproject commit aab1c0a0e0b39825b16673128729ef46310a5da8
+Subproject commit e058ca661692a8d01f8cf9d35939dfe3105ce968
index a873be455d5558c673c3f6cd4d0d50f89f611bbd..2beb652aa017a6d2c16e959afe05d592a203224d 100644 (file)
@@ -10,7 +10,8 @@
 
 #![allow(deprecated)]
 
-//! Thread-local reference-counted boxes (the `Rc<T>` type).
+//! Unsynchronized reference-counted boxes (the `Rc<T>` type) which are usable
+//! only within a single thread.
 //!
 //! The `Rc<T>` type provides shared ownership of an immutable value.
 //! Destruction is deterministic, and will occur as soon as the last owner is
index 3ebab266e2ffed5494cc069a90172e64833f9b83..dffe9dee022a68704fc565efa10467bc56c2725b 100644 (file)
@@ -1198,17 +1198,15 @@ impl<I: ExactSizeIterator> ExactSizeIterator for Peekable<I> {}
 impl<I: Iterator> Peekable<I> {
     /// Returns a reference to the next() value without advancing the iterator.
     ///
-    /// The `peek()` method will return the value that a call to [`next()`] would
-    /// return, but does not advance the iterator. Like [`next()`], if there is
-    /// a value, it's wrapped in a `Some(T)`, but if the iterator is over, it
-    /// will return `None`.
+    /// Like [`next()`], if there is a value, it is wrapped in a `Some(T)`.
+    /// But if the iteration is over, `None` is returned.
     ///
     /// [`next()`]: trait.Iterator.html#tymethod.next
     ///
-    /// Because `peek()` returns reference, and many iterators iterate over
-    /// references, this leads to a possibly confusing situation where the
+    /// Because `peek()` returns a reference, and many iterators iterate over
+    /// references, there can be a possibly confusing situation where the
     /// return value is a double reference. You can see this effect in the
-    /// examples below, with `&&i32`.
+    /// examples below.
     ///
     /// # Examples
     ///
@@ -1225,13 +1223,13 @@ impl<I: Iterator> Peekable<I> {
     ///
     /// assert_eq!(iter.next(), Some(&2));
     ///
-    /// // we can peek() multiple times, the iterator won't advance
+    /// // The iterator does not advance even if we `peek` multiple times
     /// assert_eq!(iter.peek(), Some(&&3));
     /// assert_eq!(iter.peek(), Some(&&3));
     ///
     /// assert_eq!(iter.next(), Some(&3));
     ///
-    /// // after the iterator is finished, so is peek()
+    /// // After the iterator is finished, so is `peek()`
     /// assert_eq!(iter.peek(), None);
     /// assert_eq!(iter.next(), None);
     /// ```
@@ -1263,10 +1261,10 @@ pub fn peek(&mut self) -> Option<&I::Item> {
     ///
     /// let mut iter = xs.iter().peekable();
     ///
-    /// // there are still elements to iterate over
+    /// // There are still elements to iterate over
     /// assert_eq!(iter.is_empty(), false);
     ///
-    /// // let's consume the iterator
+    /// // Let's consume the iterator
     /// iter.next();
     /// iter.next();
     /// iter.next();
index 3549bd6a3bc68f4fcfa52cab27ed36a9817b5378..9b5c2128f1eaf9fd59460d6fc432794f0a05fc26 100644 (file)
@@ -371,13 +371,16 @@ pub trait Extend<A> {
 /// Basic usage:
 ///
 /// ```
-/// let numbers = vec![1, 2, 3];
+/// let numbers = vec![1, 2, 3, 4, 5, 6];
 ///
 /// let mut iter = numbers.iter();
 ///
 /// assert_eq!(Some(&1), iter.next());
-/// assert_eq!(Some(&3), iter.next_back());
-/// assert_eq!(Some(&2), iter.next_back());
+/// assert_eq!(Some(&6), iter.next_back());
+/// assert_eq!(Some(&5), iter.next_back());
+/// assert_eq!(Some(&2), iter.next());
+/// assert_eq!(Some(&3), iter.next());
+/// assert_eq!(Some(&4), iter.next());
 /// assert_eq!(None, iter.next());
 /// assert_eq!(None, iter.next_back());
 /// ```
@@ -395,13 +398,16 @@ pub trait DoubleEndedIterator: Iterator {
     /// Basic usage:
     ///
     /// ```
-    /// let numbers = vec![1, 2, 3];
+    /// let numbers = vec![1, 2, 3, 4, 5, 6];
     ///
     /// let mut iter = numbers.iter();
     ///
     /// assert_eq!(Some(&1), iter.next());
-    /// assert_eq!(Some(&3), iter.next_back());
-    /// assert_eq!(Some(&2), iter.next_back());
+    /// assert_eq!(Some(&6), iter.next_back());
+    /// assert_eq!(Some(&5), iter.next_back());
+    /// assert_eq!(Some(&2), iter.next());
+    /// assert_eq!(Some(&3), iter.next());
+    /// assert_eq!(Some(&4), iter.next());
     /// assert_eq!(None, iter.next());
     /// assert_eq!(None, iter.next_back());
     /// ```
index 79e1462eaa135eb58013a157fb584f6823b5485f..07b05f91f489f9ecfa3752b2175e5f0bc35a3251 100644 (file)
 use num::Float;
 use num::FpCategory as Fp;
 
+/// The radix or base of the internal representation of `f32`.
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const RADIX: u32 = 2;
 
+/// Number of significant digits in base 2.
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const MANTISSA_DIGITS: u32 = 24;
+/// Approximate number of significant digits in base 10.
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const DIGITS: u32 = 6;
 
+/// Difference between `1.0` and the next largest representable number.
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const EPSILON: f32 = 1.19209290e-07_f32;
 
-/// Smallest finite f32 value
+/// Smallest finite `f32` value.
 #[stable(feature = "rust1", since = "1.0.0")]
 pub const MIN: f32 = -3.40282347e+38_f32;
-/// Smallest positive, normalized f32 value
+/// Smallest positive normal `f32` value.
 #[stable(feature = "rust1", since = "1.0.0")]
 pub const MIN_POSITIVE: f32 = 1.17549435e-38_f32;
-/// Largest finite f32 value
+/// Largest finite `f32` value.
 #[stable(feature = "rust1", since = "1.0.0")]
 pub const MAX: f32 = 3.40282347e+38_f32;
 
+/// One greater than the minimum possible normal power of 2 exponent.
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const MIN_EXP: i32 = -125;
+/// Maximum possible power of 2 exponent.
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const MAX_EXP: i32 = 128;
 
+/// Minimum possible normal power of 10 exponent.
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const MIN_10_EXP: i32 = -37;
+/// Maximum possible power of 10 exponent.
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const MAX_10_EXP: i32 = 38;
 
+/// Not a Number (NaN).
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const NAN: f32 = 0.0_f32/0.0_f32;
+/// Infinity (∞).
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const INFINITY: f32 = 1.0_f32/0.0_f32;
+/// Negative infinity (-∞).
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const NEG_INFINITY: f32 = -1.0_f32/0.0_f32;
 
 /// Basic mathematical constants.
 pub mod consts {
     // FIXME: replace with mathematical constants from cmath.
 
-    /// Archimedes' constant
+    /// Archimedes' constant (π)
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const PI: f32 = 3.14159265358979323846264338327950288_f32;
 
-    /// pi/2.0
+    /// π/2
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const FRAC_PI_2: f32 = 1.57079632679489661923132169163975144_f32;
 
-    /// pi/3.0
+    /// π/3
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const FRAC_PI_3: f32 = 1.04719755119659774615421446109316763_f32;
 
-    /// pi/4.0
+    /// π/4
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const FRAC_PI_4: f32 = 0.785398163397448309615660845819875721_f32;
 
-    /// pi/6.0
+    /// π/6
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const FRAC_PI_6: f32 = 0.52359877559829887307710723054658381_f32;
 
-    /// pi/8.0
+    /// π/8
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const FRAC_PI_8: f32 = 0.39269908169872415480783042290993786_f32;
 
-    /// 1.0/pi
+    /// 1/π
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const FRAC_1_PI: f32 = 0.318309886183790671537767526745028724_f32;
 
-    /// 2.0/pi
+    /// 2/π
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const FRAC_2_PI: f32 = 0.636619772367581343075535053490057448_f32;
 
-    /// 2.0/sqrt(pi)
+    /// 2/sqrt(π)
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const FRAC_2_SQRT_PI: f32 = 1.12837916709551257389615890312154517_f32;
 
-    /// sqrt(2.0)
+    /// sqrt(2)
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const SQRT_2: f32 = 1.41421356237309504880168872420969808_f32;
 
-    /// 1.0/sqrt(2.0)
+    /// 1/sqrt(2)
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const FRAC_1_SQRT_2: f32 = 0.707106781186547524400844362104849039_f32;
 
-    /// Euler's number
+    /// Euler's number (e)
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const E: f32 = 2.71828182845904523536028747135266250_f32;
 
-    /// log2(e)
+    /// log<sub>2</sub>(e)
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const LOG2_E: f32 = 1.44269504088896340735992468100189214_f32;
 
-    /// log10(e)
+    /// log<sub>10</sub>(e)
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const LOG10_E: f32 = 0.434294481903251827651128918916605082_f32;
 
-    /// ln(2.0)
+    /// ln(2)
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const LN_2: f32 = 0.693147180559945309417232121458176568_f32;
 
-    /// ln(10.0)
+    /// ln(10)
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const LN_10: f32 = 2.30258509299404568401799145468436421_f32;
 }
index 35557f61c45420b5ff291aa369876a63e94be7aa..82a09e599e027a49065a342fcaac64fd31da2a79 100644 (file)
 use num::FpCategory as Fp;
 use num::Float;
 
+/// The radix or base of the internal representation of `f64`.
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const RADIX: u32 = 2;
 
+/// Number of significant digits in base 2.
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const MANTISSA_DIGITS: u32 = 53;
+/// Approximate number of significant digits in base 10.
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const DIGITS: u32 = 15;
 
+/// Difference between `1.0` and the next largest representable number.
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const EPSILON: f64 = 2.2204460492503131e-16_f64;
 
-/// Smallest finite f64 value
+/// Smallest finite `f64` value.
 #[stable(feature = "rust1", since = "1.0.0")]
 pub const MIN: f64 = -1.7976931348623157e+308_f64;
-/// Smallest positive, normalized f64 value
+/// Smallest positive normal `f64` value.
 #[stable(feature = "rust1", since = "1.0.0")]
 pub const MIN_POSITIVE: f64 = 2.2250738585072014e-308_f64;
-/// Largest finite f64 value
+/// Largest finite `f64` value.
 #[stable(feature = "rust1", since = "1.0.0")]
 pub const MAX: f64 = 1.7976931348623157e+308_f64;
 
+/// One greater than the minimum possible normal power of 2 exponent.
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const MIN_EXP: i32 = -1021;
+/// Maximum possible power of 2 exponent.
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const MAX_EXP: i32 = 1024;
 
+/// Minimum possible normal power of 10 exponent.
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const MIN_10_EXP: i32 = -307;
+/// Maximum possible power of 10 exponent.
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const MAX_10_EXP: i32 = 308;
 
+/// Not a Number (NaN).
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const NAN: f64 = 0.0_f64/0.0_f64;
+/// Infinity (∞).
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const INFINITY: f64 = 1.0_f64/0.0_f64;
+/// Negative infinity (-∞).
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const NEG_INFINITY: f64 = -1.0_f64/0.0_f64;
 
 /// Basic mathematical constants.
 pub mod consts {
     // FIXME: replace with mathematical constants from cmath.
 
-    /// Archimedes' constant
+    /// Archimedes' constant (π)
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const PI: f64 = 3.14159265358979323846264338327950288_f64;
 
-    /// pi/2.0
+    /// π/2
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const FRAC_PI_2: f64 = 1.57079632679489661923132169163975144_f64;
 
-    /// pi/3.0
+    /// π/3
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const FRAC_PI_3: f64 = 1.04719755119659774615421446109316763_f64;
 
-    /// pi/4.0
+    /// π/4
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const FRAC_PI_4: f64 = 0.785398163397448309615660845819875721_f64;
 
-    /// pi/6.0
+    /// π/6
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const FRAC_PI_6: f64 = 0.52359877559829887307710723054658381_f64;
 
-    /// pi/8.0
+    /// π/8
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const FRAC_PI_8: f64 = 0.39269908169872415480783042290993786_f64;
 
-    /// 1.0/pi
+    /// 1/π
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const FRAC_1_PI: f64 = 0.318309886183790671537767526745028724_f64;
 
-    /// 2.0/pi
+    /// 2/π
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const FRAC_2_PI: f64 = 0.636619772367581343075535053490057448_f64;
 
-    /// 2.0/sqrt(pi)
+    /// 2/sqrt(π)
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const FRAC_2_SQRT_PI: f64 = 1.12837916709551257389615890312154517_f64;
 
-    /// sqrt(2.0)
+    /// sqrt(2)
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const SQRT_2: f64 = 1.41421356237309504880168872420969808_f64;
 
-    /// 1.0/sqrt(2.0)
+    /// 1/sqrt(2)
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const FRAC_1_SQRT_2: f64 = 0.707106781186547524400844362104849039_f64;
 
-    /// Euler's number
+    /// Euler's number (e)
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const E: f64 = 2.71828182845904523536028747135266250_f64;
 
-    /// log2(e)
+    /// log<sub>2</sub>(e)
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const LOG2_E: f64 = 1.44269504088896340735992468100189214_f64;
 
-    /// log10(e)
+    /// log<sub>10</sub>(e)
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const LOG10_E: f64 = 0.434294481903251827651128918916605082_f64;
 
-    /// ln(2.0)
+    /// ln(2)
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const LN_2: f64 = 0.693147180559945309417232121458176568_f64;
 
-    /// ln(10.0)
+    /// ln(10)
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const LN_10: f64 = 2.30258509299404568401799145468436421_f64;
 }
index bd6cfc427affd04a936a2a115440c5989eb7606f..e74c30d5e5af8db76279a044e1051269f2354bbf 100644 (file)
 
 macro_rules! int_module { ($T:ident, $bits:expr) => (
 
+/// The smallest value that can be represented by this integer type.
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const MIN: $T = $T::min_value();
+/// The largest value that can be represented by this integer type.
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const MAX: $T = $T::max_value();
 
 ) }
index 0d79398a8f1d51bd26e975a0a45fe16fe5b58aab..7dd9ecedb56c8a71e49daff0eb6da3f4d12d7f5e 100644 (file)
@@ -11,7 +11,6 @@
 //! Numeric traits and functions for the built-in numeric types.
 
 #![stable(feature = "rust1", since = "1.0.0")]
-#![allow(missing_docs)]
 
 use char::CharExt;
 use cmp::PartialOrd;
@@ -188,6 +187,12 @@ macro_rules! int_impl {
      $sub_with_overflow:path,
      $mul_with_overflow:path) => {
         /// Returns the smallest value that can be represented by this integer type.
+        ///
+        /// # Examples
+        ///
+        /// ```
+        /// assert_eq!(i8::min_value(), -128);
+        /// ```
         #[stable(feature = "rust1", since = "1.0.0")]
         #[inline]
         pub const fn min_value() -> Self {
@@ -195,6 +200,12 @@ pub const fn min_value() -> Self {
         }
 
         /// Returns the largest value that can be represented by this integer type.
+        ///
+        /// # Examples
+        ///
+        /// ```
+        /// assert_eq!(i8::max_value(), 127);
+        /// ```
         #[stable(feature = "rust1", since = "1.0.0")]
         #[inline]
         pub const fn max_value() -> Self {
@@ -288,6 +299,8 @@ pub fn trailing_zeros(self) -> u32 {
         /// Shifts the bits to the left by a specified amount, `n`,
         /// wrapping the truncated bits to the end of the resulting integer.
         ///
+        /// Please note this isn't the same operation as `<<`!
+        ///
         /// # Examples
         ///
         /// Basic usage:
@@ -308,6 +321,8 @@ pub fn rotate_left(self, n: u32) -> Self {
         /// wrapping the truncated bits to the beginning of the resulting
         /// integer.
         ///
+        /// Please note this isn't the same operation as `>>`!
+        ///
         /// # Examples
         ///
         /// Basic usage:
@@ -1250,11 +1265,23 @@ macro_rules! uint_impl {
      $sub_with_overflow:path,
      $mul_with_overflow:path) => {
         /// Returns the smallest value that can be represented by this integer type.
+        ///
+        /// # Examples
+        ///
+        /// ```
+        /// assert_eq!(u8::min_value(), 0);
+        /// ```
         #[stable(feature = "rust1", since = "1.0.0")]
         #[inline]
         pub const fn min_value() -> Self { 0 }
 
         /// Returns the largest value that can be represented by this integer type.
+        ///
+        /// # Examples
+        ///
+        /// ```
+        /// assert_eq!(u8::max_value(), 255);
+        /// ```
         #[stable(feature = "rust1", since = "1.0.0")]
         #[inline]
         pub const fn max_value() -> Self { !0 }
@@ -1361,6 +1388,8 @@ pub fn trailing_zeros(self) -> u32 {
         /// Shifts the bits to the left by a specified amount, `n`,
         /// wrapping the truncated bits to the end of the resulting integer.
         ///
+        /// Please note this isn't the same operation as `<<`!
+        ///
         /// # Examples
         ///
         /// Basic usage:
@@ -1383,6 +1412,8 @@ pub fn rotate_left(self, n: u32) -> Self {
         /// wrapping the truncated bits to the beginning of the resulting
         /// integer.
         ///
+        /// Please note this isn't the same operation as `>>`!
+        ///
         /// # Examples
         ///
         /// Basic usage:
index 2ab2f9548ef1bfd5ec67ccfb79b1a43efa333329..cc9256ab6bf4ee34fd5e2126eb8f9e66e77461b5 100644 (file)
 
 macro_rules! uint_module { ($T:ident, $bits:expr) => (
 
+/// The smallest value that can be represented by this integer type.
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const MIN: $T = $T::min_value();
+/// The largest value that can be represented by this integer type.
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const MAX: $T = $T::max_value();
 
 ) }
index 7753aae147a88d722d532d7198e1d52bf7b3e9cd..9347ac2a8c82f0eb40d953f9318c39018ee72fa7 100644 (file)
@@ -1929,7 +1929,7 @@ pub trait FnMut<Args> : FnOnce<Args> {
 #[fundamental] // so that regex can rely that `&str: !FnMut`
 pub trait FnOnce<Args> {
     /// The returned type after the call operator is used.
-    #[unstable(feature = "fn_traits", issue = "29625")]
+    #[stable(feature = "fn_once_output", since = "1.12.0")]
     type Output;
 
     /// This is called when the call operator is used.
index 18ea17f48162f1b66c168c00d3246d8b9b7712b3..601d3866b02d452be8808bcb3db6c936fa03d771 100644 (file)
@@ -101,7 +101,6 @@ fn pat(&mut self, pat: &hir::Pat, pred: CFGIndex) -> CFGIndex {
         match pat.node {
             PatKind::Binding(_, _, None) |
             PatKind::Path(..) |
-            PatKind::QPath(..) |
             PatKind::Lit(..) |
             PatKind::Range(..) |
             PatKind::Wild => {
index 72261c473e5c5d6a252733f988b43956f681ef78..218681efb7dc167d033b1da220acc94b901bd215 100644 (file)
@@ -137,15 +137,6 @@ pub fn def_id(&self) -> DefId {
         }
     }
 
-    pub fn variant_def_ids(&self) -> Option<(DefId, DefId)> {
-        match *self {
-            Def::Variant(enum_id, var_id) => {
-                Some((enum_id, var_id))
-            }
-            _ => None
-        }
-    }
-
     pub fn kind_name(&self) -> &'static str {
         match *self {
             Def::Fn(..) => "function",
index 78fd2bbbe0d2590e033ea4d87da820c69aad227f..5e0e6622185f8b50336cc4409b033a5be0f4793b 100644 (file)
@@ -930,12 +930,11 @@ pub fn noop_fold_pat<T: Folder>(p: P<Pat>, folder: &mut T) -> P<Pat> {
                     PatKind::TupleStruct(folder.fold_path(pth),
                             pats.move_map(|x| folder.fold_pat(x)), ddpos)
                 }
-                PatKind::Path(pth) => {
-                    PatKind::Path(folder.fold_path(pth))
-                }
-                PatKind::QPath(qself, pth) => {
-                    let qself = QSelf { ty: folder.fold_ty(qself.ty), ..qself };
-                    PatKind::QPath(qself, folder.fold_path(pth))
+                PatKind::Path(opt_qself, pth) => {
+                    let opt_qself = opt_qself.map(|qself| {
+                        QSelf { ty: folder.fold_ty(qself.ty), position: qself.position }
+                    });
+                    PatKind::Path(opt_qself, folder.fold_path(pth))
                 }
                 PatKind::Struct(pth, fields, etc) => {
                     let pth = folder.fold_path(pth);
index 2d5c4ebf8d898bbc3bf0bff24f945a712f48045c..442c85af22a262967f707484de232e8b06c443ef 100644 (file)
@@ -460,11 +460,10 @@ pub fn walk_pat<'v, V: Visitor<'v>>(visitor: &mut V, pattern: &'v Pat) {
             visitor.visit_path(path, pattern.id);
             walk_list!(visitor, visit_pat, children);
         }
-        PatKind::Path(ref path) => {
-            visitor.visit_path(path, pattern.id);
-        }
-        PatKind::QPath(ref qself, ref path) => {
-            visitor.visit_ty(&qself.ty);
+        PatKind::Path(ref opt_qself, ref path) => {
+            if let Some(ref qself) = *opt_qself {
+                visitor.visit_ty(&qself.ty);
+            }
             visitor.visit_path(path, pattern.id)
         }
         PatKind::Struct(ref path, ref fields, _) => {
index 2cc39412182dc5a771417a8f7e714cdf6cad4a3d..9d124dadb766adb911324f8a5cdc78f1eed57e38 100644 (file)
@@ -862,7 +862,8 @@ fn lower_pat(&mut self, p: &Pat) -> P<hir::Pat> {
                                                       respan(pth1.span, pth1.node.name),
                                                       sub.as_ref().map(|x| this.lower_pat(x)))
                             }
-                            _ => hir::PatKind::Path(hir::Path::from_name(pth1.span, pth1.node.name))
+                            _ => hir::PatKind::Path(None, hir::Path::from_name(pth1.span,
+                                                                               pth1.node.name))
                         }
                     })
                 }
@@ -872,15 +873,11 @@ fn lower_pat(&mut self, p: &Pat) -> P<hir::Pat> {
                                               pats.iter().map(|x| self.lower_pat(x)).collect(),
                                               ddpos)
                 }
-                PatKind::Path(None, ref pth) => {
-                    hir::PatKind::Path(self.lower_path(pth))
-                }
-                PatKind::Path(Some(ref qself), ref pth) => {
-                    let qself = hir::QSelf {
-                        ty: self.lower_ty(&qself.ty),
-                        position: qself.position,
-                    };
-                    hir::PatKind::QPath(qself, self.lower_path(pth))
+                PatKind::Path(ref opt_qself, ref path) => {
+                    let opt_qself = opt_qself.as_ref().map(|qself| {
+                        hir::QSelf { ty: self.lower_ty(&qself.ty), position: qself.position }
+                    });
+                    hir::PatKind::Path(opt_qself, self.lower_path(path))
                 }
                 PatKind::Struct(ref pth, ref fields, etc) => {
                     let pth = self.lower_path(pth);
@@ -1831,7 +1828,7 @@ fn pat_enum(&mut self, span: Span, path: hir::Path, subpats: hir::HirVec<P<hir::
                 -> P<hir::Pat> {
         let def = self.resolver.resolve_generated_global_path(&path, true);
         let pt = if subpats.is_empty() {
-            hir::PatKind::Path(path)
+            hir::PatKind::Path(None, path)
         } else {
             hir::PatKind::TupleStruct(path, subpats, None)
         };
index a139dd152f006f5bcc51d26c0d26a1cb33ea7a21..655f80ec07238ff9ba97fdb99057415fd6dec35b 100644 (file)
@@ -487,8 +487,7 @@ fn walk_<G>(&self, it: &mut G) -> bool
             PatKind::Lit(_) |
             PatKind::Range(_, _) |
             PatKind::Binding(..) |
-            PatKind::Path(..) |
-            PatKind::QPath(_, _) => {
+            PatKind::Path(..) => {
                 true
             }
         }
@@ -538,15 +537,9 @@ pub enum PatKind {
     /// 0 <= position <= subpats.len()
     TupleStruct(Path, HirVec<P<Pat>>, Option<usize>),
 
-    /// A path pattern.
+    /// A possibly qualified path pattern.
     /// Such pattern can be resolved to a unit struct/variant or a constant.
-    Path(Path),
-
-    /// An associated const named using the qualified path `<T>::CONST` or
-    /// `<T as Trait>::CONST`. Associated consts from inherent impls can be
-    /// referred to as simply `T::CONST`, in which case they will end up as
-    /// PatKind::Path, and the resolver will have to sort that out.
-    QPath(QSelf, Path),
+    Path(Option<QSelf>, Path),
 
     /// A tuple pattern `(a, b)`.
     /// If the `..` pattern fragment is present, then `Option<usize>` denotes its position.
@@ -836,7 +829,7 @@ pub enum Expr_ {
     ExprVec(HirVec<P<Expr>>),
     /// A function call
     ///
-    /// The first field resolves to the function itself,
+    /// The first field resolves to the function itself (usually an `ExprPath`),
     /// and the second field is the list of arguments
     ExprCall(P<Expr>, HirVec<P<Expr>>),
     /// A method call (`x.foo::<Bar, Baz>(a, b, c, d)`)
@@ -845,9 +838,9 @@ pub enum Expr_ {
     /// The vector of `Ty`s are the ascripted type parameters for the method
     /// (within the angle brackets).
     ///
-    /// The first element of the vector of `Expr`s is the expression that evaluates
-    /// to the object on which the method is being called on (the receiver),
-    /// and the remaining elements are the rest of the arguments.
+    /// The first element of the vector of `Expr`s is the expression that
+    /// evaluates to the object on which the method is being called on (the
+    /// receiver), and the remaining elements are the rest of the arguments.
     ///
     /// Thus, `x.foo::<Bar, Baz>(a, b, c, d)` is represented as
     /// `ExprMethodCall(foo, [Bar, Baz], [x, a, b, c, d])`.
@@ -919,13 +912,13 @@ pub enum Expr_ {
     /// Inline assembly (from `asm!`), with its outputs and inputs.
     ExprInlineAsm(InlineAsm, Vec<P<Expr>>, Vec<P<Expr>>),
 
-    /// A struct literal expression.
+    /// A struct or struct-like variant literal expression.
     ///
     /// For example, `Foo {x: 1, y: 2}`, or
     /// `Foo {x: 1, .. base}`, where `base` is the `Option<Expr>`.
     ExprStruct(Path, HirVec<Field>, Option<P<Expr>>),
 
-    /// A vector literal constructed from one repeated element.
+    /// An array literal constructed from one repeated element.
     ///
     /// For example, `[1; 5]`. The first expression is the element
     /// to be repeated; the second is the number of times to repeat it.
@@ -950,14 +943,21 @@ pub struct QSelf {
     pub position: usize,
 }
 
+/// Hints at the original code for a `match _ { .. }`
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
 pub enum MatchSource {
+    /// A `match _ { .. }`
     Normal,
+    /// An `if let _ = _ { .. }` (optionally with `else { .. }`)
     IfLetDesugar {
         contains_else_clause: bool,
     },
+    /// A `while let _ = _ { .. }` (which was desugared to a
+    /// `loop { match _ { .. } }`)
     WhileLetDesugar,
+    /// A desugared `for _ in _ { .. }` loop
     ForLoopDesugar,
+    /// A desugared `?` operator
     TryDesugar,
 }
 
@@ -975,8 +975,7 @@ pub struct MutTy {
     pub mutbl: Mutability,
 }
 
-/// Represents a method's signature in a trait declaration,
-/// or in an implementation.
+/// Represents a method's signature in a trait declaration or implementation.
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
 pub struct MethodSig {
     pub unsafety: Unsafety,
@@ -999,13 +998,20 @@ pub struct TraitItem {
     pub span: Span,
 }
 
+/// Represents a trait method or associated constant or type
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
 pub enum TraitItem_ {
+    /// An associated constant with an optional value (otherwise `impl`s
+    /// must contain a value)
     ConstTraitItem(P<Ty>, Option<P<Expr>>),
+    /// A method with an optional body
     MethodTraitItem(MethodSig, Option<P<Block>>),
+    /// An associated type with (possibly empty) bounds and optional concrete
+    /// type
     TypeTraitItem(TyParamBounds, Option<P<Ty>>),
 }
 
+/// Represents anything within an `impl` block
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
 pub struct ImplItem {
     pub id: NodeId,
@@ -1017,10 +1023,15 @@ pub struct ImplItem {
     pub span: Span,
 }
 
+/// Represents different contents within `impl`s
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
 pub enum ImplItemKind {
+    /// An associated constant of the given type, set to the constant result
+    /// of the expression
     Const(P<Ty>, P<Expr>),
+    /// A method implementation with the given signature and body
     Method(MethodSig, P<Block>),
+    /// An associated type
     Type(P<Ty>),
 }
 
index 3bb9b6d260255065bee28fc7986777812297d13e..593d10ef4f7c4a9910c020b9a4218dc5f345fbba 100644 (file)
 use hir::def_id::DefId;
 use hir::{self, PatKind};
 use ty::TyCtxt;
-use util::nodemap::FnvHashMap;
 use syntax::ast;
 use syntax::codemap::Spanned;
 use syntax_pos::{Span, DUMMY_SP};
 
 use std::iter::{Enumerate, ExactSizeIterator};
 
-pub type PatIdMap = FnvHashMap<ast::Name, ast::NodeId>;
-
 pub struct EnumerateAndAdjust<I> {
     enumerate: Enumerate<I>,
     gap_pos: usize,
@@ -56,7 +53,7 @@ fn enumerate_and_adjust(self, expected_len: usize, gap_pos: Option<usize>)
 
 pub fn pat_is_refutable(dm: &DefMap, pat: &hir::Pat) -> bool {
     match pat.node {
-        PatKind::Lit(_) | PatKind::Range(_, _) | PatKind::QPath(..) => true,
+        PatKind::Lit(_) | PatKind::Range(_, _) | PatKind::Path(Some(..), _) => true,
         PatKind::TupleStruct(..) |
         PatKind::Path(..) |
         PatKind::Struct(..) => {
@@ -70,23 +67,9 @@ pub fn pat_is_refutable(dm: &DefMap, pat: &hir::Pat) -> bool {
     }
 }
 
-pub fn pat_is_variant_or_struct(dm: &DefMap, pat: &hir::Pat) -> bool {
-    match pat.node {
-        PatKind::TupleStruct(..) |
-        PatKind::Path(..) |
-        PatKind::Struct(..) => {
-            match dm.get(&pat.id).map(|d| d.full_def()) {
-                Some(Def::Variant(..)) | Some(Def::Struct(..)) | Some(Def::TyAlias(..)) => true,
-                _ => false
-            }
-        }
-        _ => false
-    }
-}
-
 pub fn pat_is_const(dm: &DefMap, pat: &hir::Pat) -> bool {
     match pat.node {
-        PatKind::Path(..) | PatKind::QPath(..) => {
+        PatKind::Path(..) => {
             match dm.get(&pat.id).map(|d| d.full_def()) {
                 Some(Def::Const(..)) | Some(Def::AssociatedConst(..)) => true,
                 _ => false
@@ -96,22 +79,6 @@ pub fn pat_is_const(dm: &DefMap, pat: &hir::Pat) -> bool {
     }
 }
 
-// Same as above, except that partially-resolved defs cause `false` to be
-// returned instead of a panic.
-pub fn pat_is_resolved_const(dm: &DefMap, pat: &hir::Pat) -> bool {
-    match pat.node {
-        PatKind::Path(..) | PatKind::QPath(..) => {
-            match dm.get(&pat.id)
-                    .and_then(|d| if d.depth == 0 { Some(d.base_def) }
-                                  else { None } ) {
-                Some(Def::Const(..)) | Some(Def::AssociatedConst(..)) => true,
-                _ => false
-            }
-        }
-        _ => false
-    }
-}
-
 /// Call `f` on every "binding" in a pattern, e.g., on `a` in
 /// `match foo() { Some(a) => (), None => () }`
 pub fn pat_bindings<F>(pat: &hir::Pat, mut f: F)
index bf6188faa2fbdf160d7c60032e0bb8613c186042..5f2fac5c01b30df8b719ef301e13cafde1a1cbbd 100644 (file)
@@ -1750,10 +1750,10 @@ pub fn print_pat(&mut self, pat: &hir::Pat) -> io::Result<()> {
                 }
                 try!(self.pclose());
             }
-            PatKind::Path(ref path) => {
+            PatKind::Path(None, ref path) => {
                 self.print_path(path, true, 0)?;
             }
-            PatKind::QPath(ref qself, ref path) => {
+            PatKind::Path(Some(ref qself), ref path) => {
                 self.print_qpath(path, qself, false)?;
             }
             PatKind::Struct(ref path, ref fields, etc) => {
index c8b8c5dbdbbcbc878f2d51cc386a49475253a80f..6551e0129f88493dacffff9044ca34b10ea112db 100644 (file)
@@ -945,52 +945,41 @@ fn determine_pat_move_mode(&mut self,
     /// The core driver for walking a pattern; `match_mode` must be
     /// established up front, e.g. via `determine_pat_move_mode` (see
     /// also `walk_irrefutable_pat` for patterns that stand alone).
-    fn walk_pat(&mut self,
-                cmt_discr: mc::cmt<'tcx>,
-                pat: &hir::Pat,
-                match_mode: MatchMode) {
-        debug!("walk_pat cmt_discr={:?} pat={:?}", cmt_discr,
-               pat);
+    fn walk_pat(&mut self, cmt_discr: mc::cmt<'tcx>, pat: &hir::Pat, match_mode: MatchMode) {
+        debug!("walk_pat cmt_discr={:?} pat={:?}", cmt_discr, pat);
 
         let tcx = &self.tcx();
         let mc = &self.mc;
         let infcx = self.mc.infcx;
         let delegate = &mut self.delegate;
         return_if_err!(mc.cat_pattern(cmt_discr.clone(), pat, |mc, cmt_pat, pat| {
-            match pat.node {
-                PatKind::Binding(bmode, _, _) => {
-                    debug!("binding cmt_pat={:?} pat={:?} match_mode={:?}",
-                           cmt_pat,
-                           pat,
-                           match_mode);
-
-                    // pat_ty: the type of the binding being produced.
-                    let pat_ty = return_if_err!(infcx.node_ty(pat.id));
-
-                    // Each match binding is effectively an assignment to the
-                    // binding being produced.
-                    if let Ok(binding_cmt) = mc.cat_def(pat.id, pat.span, pat_ty,
-                                                        tcx.expect_def(pat.id)) {
-                        delegate.mutate(pat.id, pat.span, binding_cmt, MutateMode::Init);
-                    }
+            if let PatKind::Binding(bmode, _, _) = pat.node {
+                debug!("binding cmt_pat={:?} pat={:?} match_mode={:?}", cmt_pat, pat, match_mode);
 
-                    // It is also a borrow or copy/move of the value being matched.
-                    match bmode {
-                        hir::BindByRef(m) => {
-                            if let ty::TyRef(&r, _) = pat_ty.sty {
-                                let bk = ty::BorrowKind::from_mutbl(m);
-                                delegate.borrow(pat.id, pat.span, cmt_pat,
-                                                r, bk, RefBinding);
-                            }
-                        }
-                        hir::BindByValue(..) => {
-                            let mode = copy_or_move(infcx, &cmt_pat, PatBindingMove);
-                            debug!("walk_pat binding consuming pat");
-                            delegate.consume_pat(pat, cmt_pat, mode);
+                // pat_ty: the type of the binding being produced.
+                let pat_ty = return_if_err!(infcx.node_ty(pat.id));
+
+                // Each match binding is effectively an assignment to the
+                // binding being produced.
+                if let Ok(binding_cmt) = mc.cat_def(pat.id, pat.span, pat_ty,
+                                                    tcx.expect_def(pat.id)) {
+                    delegate.mutate(pat.id, pat.span, binding_cmt, MutateMode::Init);
+                }
+
+                // It is also a borrow or copy/move of the value being matched.
+                match bmode {
+                    hir::BindByRef(m) => {
+                        if let ty::TyRef(&r, _) = pat_ty.sty {
+                            let bk = ty::BorrowKind::from_mutbl(m);
+                            delegate.borrow(pat.id, pat.span, cmt_pat, r, bk, RefBinding);
                         }
                     }
+                    hir::BindByValue(..) => {
+                        let mode = copy_or_move(infcx, &cmt_pat, PatBindingMove);
+                        debug!("walk_pat binding consuming pat");
+                        delegate.consume_pat(pat, cmt_pat, mode);
+                    }
                 }
-                _ => {}
             }
         }));
 
@@ -999,72 +988,23 @@ fn walk_pat(&mut self,
         // to the above loop's visit of than the bindings that form
         // the leaves of the pattern tree structure.
         return_if_err!(mc.cat_pattern(cmt_discr, pat, |mc, cmt_pat, pat| {
-            match pat.node {
-                PatKind::Struct(..) | PatKind::TupleStruct(..) |
-                PatKind::Path(..) | PatKind::QPath(..) => {
-                    match tcx.expect_def(pat.id) {
-                        Def::Variant(enum_did, variant_did) => {
-                            let downcast_cmt =
-                                if tcx.lookup_adt_def(enum_did).is_univariant() {
-                                    cmt_pat
-                                } else {
-                                    let cmt_pat_ty = cmt_pat.ty;
-                                    mc.cat_downcast(pat, cmt_pat, cmt_pat_ty, variant_did)
-                                };
-
-                            debug!("variant downcast_cmt={:?} pat={:?}",
-                                   downcast_cmt,
-                                   pat);
-
-                            delegate.matched_pat(pat, downcast_cmt, match_mode);
-                        }
-
-                        Def::Struct(..) | Def::TyAlias(..) => {
-                            // A struct (in either the value or type
-                            // namespace; we encounter the former on
-                            // e.g. patterns for unit structs).
-
-                            debug!("struct cmt_pat={:?} pat={:?}",
-                                   cmt_pat,
-                                   pat);
-
-                            delegate.matched_pat(pat, cmt_pat, match_mode);
-                        }
-
-                        Def::Const(..) | Def::AssociatedConst(..) => {
-                            // This is a leaf (i.e. identifier binding
-                            // or constant value to match); thus no
-                            // `matched_pat` call.
-                        }
+            match tcx.expect_def_or_none(pat.id) {
+                Some(Def::Variant(enum_did, variant_did)) => {
+                    let downcast_cmt = if tcx.lookup_adt_def(enum_did).is_univariant() {
+                        cmt_pat
+                    } else {
+                        let cmt_pat_ty = cmt_pat.ty;
+                        mc.cat_downcast(pat, cmt_pat, cmt_pat_ty, variant_did)
+                    };
 
-                        def => {
-                            // An enum type should never be in a pattern.
-                            // Remaining cases are e.g. Def::Fn, to
-                            // which identifiers within patterns
-                            // should not resolve. However, we do
-                            // encouter this when using the
-                            // expr-use-visitor during typeck. So just
-                            // ignore it, an error should have been
-                            // reported.
-
-                            if !tcx.sess.has_errors() {
-                                span_bug!(pat.span,
-                                          "Pattern has unexpected def: {:?} and type {:?}",
-                                          def,
-                                          cmt_pat.ty);
-                            }
-                        }
-                    }
+                    debug!("variant downcast_cmt={:?} pat={:?}", downcast_cmt, pat);
+                    delegate.matched_pat(pat, downcast_cmt, match_mode);
                 }
-
-                PatKind::Wild | PatKind::Tuple(..) | PatKind::Box(..) |
-                PatKind::Ref(..) | PatKind::Lit(..) | PatKind::Range(..) |
-                PatKind::Vec(..) | PatKind::Binding(..) => {
-                    // Each of these cases does not
-                    // correspond to an enum variant or struct, so we
-                    // do not do any `matched_pat` calls for these
-                    // cases either.
+                Some(Def::Struct(..)) | Some(Def::TyAlias(..)) | Some(Def::AssociatedTy(..)) => {
+                    debug!("struct cmt_pat={:?} pat={:?}", cmt_pat, pat);
+                    delegate.matched_pat(pat, cmt_pat, match_mode);
                 }
+                _ => {}
             }
         }));
     }
index a70829347f1c155deb61449e060f68efe52734bd..28bfb460a14faf75c9fdbbe10fb5a195cfc3e600 100644 (file)
@@ -1050,9 +1050,8 @@ pub fn cat_pattern<F>(&self, cmt: cmt<'tcx>, pat: &hir::Pat, mut op: F) -> McRes
     }
 
     // FIXME(#19596) This is a workaround, but there should be a better way to do this
-    fn cat_pattern_<F>(&self, cmt: cmt<'tcx>, pat: &hir::Pat, op: &mut F)
-                       -> McResult<()>
-        where F : FnMut(&MemCategorizationContext<'a, 'gcx, 'tcx>, cmt<'tcx>, &hir::Pat),
+    fn cat_pattern_<F>(&self, cmt: cmt<'tcx>, pat: &hir::Pat, op: &mut F) -> McResult<()>
+        where F : FnMut(&MemCategorizationContext<'a, 'gcx, 'tcx>, cmt<'tcx>, &hir::Pat)
     {
         // Here, `cmt` is the categorization for the value being
         // matched and pat is the pattern it is being matched against.
@@ -1099,21 +1098,14 @@ fn cat_pattern_<F>(&self, cmt: cmt<'tcx>, pat: &hir::Pat, op: &mut F)
         // step out of sync again. So you'll see below that we always
         // get the type of the *subpattern* and use that.
 
-        debug!("cat_pattern: {:?} cmt={:?}",
-               pat,
-               cmt);
-
-        (*op)(self, cmt.clone(), pat);
+        debug!("cat_pattern: {:?} cmt={:?}", pat, cmt);
 
-        let opt_def = self.tcx().expect_def_or_none(pat.id);
-        if opt_def == Some(Def::Err) {
-            return Err(());
-        }
+        op(self, cmt.clone(), pat);
 
         // Note: This goes up here (rather than within the PatKind::TupleStruct arm
-        // alone) because struct patterns can refer to struct types or
-        // to struct variants within enums.
-        let cmt = match opt_def {
+        // alone) because PatKind::Struct can also refer to variants.
+        let cmt = match self.tcx().expect_def_or_none(pat.id) {
+            Some(Def::Err) => return Err(()),
             Some(Def::Variant(enum_did, variant_did))
                 // univariant enums do not need downcasts
                 if !self.tcx().lookup_adt_def(enum_did).is_univariant() => {
@@ -1123,66 +1115,33 @@ fn cat_pattern_<F>(&self, cmt: cmt<'tcx>, pat: &hir::Pat, op: &mut F)
         };
 
         match pat.node {
-          PatKind::Wild => {
-            // _
-          }
-
           PatKind::TupleStruct(_, ref subpats, ddpos) => {
-            match opt_def {
-                Some(Def::Variant(enum_def, def_id)) => {
-                    // variant(x, y, z)
-                    let expected_len = self.tcx().lookup_adt_def(enum_def)
-                                                 .variant_with_id(def_id).fields.len();
-                    for (i, subpat) in subpats.iter().enumerate_and_adjust(expected_len, ddpos) {
-                        let subpat_ty = self.pat_ty(&subpat)?; // see (*2)
-
-                        let subcmt =
-                            self.cat_imm_interior(
-                                pat, cmt.clone(), subpat_ty,
-                                InteriorField(PositionalField(i)));
-
-                        self.cat_pattern_(subcmt, &subpat, op)?;
-                    }
+            let expected_len = match self.tcx().expect_def(pat.id) {
+                Def::Variant(enum_def, def_id) => {
+                    self.tcx().lookup_adt_def(enum_def).variant_with_id(def_id).fields.len()
                 }
-                Some(Def::Struct(..)) => {
-                    let expected_len = match self.pat_ty(&pat)?.sty {
+                Def::Struct(..) => {
+                    match self.pat_ty(&pat)?.sty {
                         ty::TyStruct(adt_def, _) => {
                             adt_def.struct_variant().fields.len()
                         }
                         ref ty => {
                             span_bug!(pat.span, "tuple struct pattern unexpected type {:?}", ty);
                         }
-                    };
-
-                    for (i, subpat) in subpats.iter().enumerate_and_adjust(expected_len, ddpos) {
-                        let subpat_ty = self.pat_ty(&subpat)?; // see (*2)
-                        let cmt_field =
-                            self.cat_imm_interior(
-                                pat, cmt.clone(), subpat_ty,
-                                InteriorField(PositionalField(i)));
-                        self.cat_pattern_(cmt_field, &subpat, op)?;
                     }
                 }
-                Some(Def::Const(..)) | Some(Def::AssociatedConst(..)) => {
-                    for subpat in subpats {
-                        self.cat_pattern_(cmt.clone(), &subpat, op)?;
-                    }
-                }
-                _ => {
-                    span_bug!(
-                        pat.span,
-                        "enum pattern didn't resolve to enum or struct {:?}",
-                        opt_def);
+                def => {
+                    span_bug!(pat.span, "tuple struct pattern didn't resolve \
+                                         to variant or struct {:?}", def);
                 }
-            }
-          }
-
-          PatKind::Path(..) | PatKind::QPath(..) | PatKind::Binding(_, _, None) => {
-              // Lone constant, or unit variant or identifier: ignore
-          }
+            };
 
-          PatKind::Binding(_, _, Some(ref subpat)) => {
-              self.cat_pattern_(cmt, &subpat, op)?;
+            for (i, subpat) in subpats.iter().enumerate_and_adjust(expected_len, ddpos) {
+                let subpat_ty = self.pat_ty(&subpat)?; // see (*2)
+                let subcmt = self.cat_imm_interior(pat, cmt.clone(), subpat_ty,
+                                                   InteriorField(PositionalField(i)));
+                self.cat_pattern_(subcmt, &subpat, op)?;
+            }
           }
 
           PatKind::Struct(_, ref field_pats, _) => {
@@ -1194,6 +1153,10 @@ fn cat_pattern_<F>(&self, cmt: cmt<'tcx>, pat: &hir::Pat, op: &mut F)
             }
           }
 
+          PatKind::Binding(_, _, Some(ref subpat)) => {
+              self.cat_pattern_(cmt, &subpat, op)?;
+          }
+
           PatKind::Tuple(ref subpats, ddpos) => {
             // (p1, ..., pN)
             let expected_len = match self.pat_ty(&pat)?.sty {
@@ -1202,10 +1165,8 @@ fn cat_pattern_<F>(&self, cmt: cmt<'tcx>, pat: &hir::Pat, op: &mut F)
             };
             for (i, subpat) in subpats.iter().enumerate_and_adjust(expected_len, ddpos) {
                 let subpat_ty = self.pat_ty(&subpat)?; // see (*2)
-                let subcmt =
-                    self.cat_imm_interior(
-                        pat, cmt.clone(), subpat_ty,
-                        InteriorField(PositionalField(i)));
+                let subcmt = self.cat_imm_interior(pat, cmt.clone(), subpat_ty,
+                                                   InteriorField(PositionalField(i)));
                 self.cat_pattern_(subcmt, &subpat, op)?;
             }
           }
@@ -1215,25 +1176,26 @@ fn cat_pattern_<F>(&self, cmt: cmt<'tcx>, pat: &hir::Pat, op: &mut F)
             // PatKind::Ref since that information is already contained
             // in the type.
             let subcmt = self.cat_deref(pat, cmt, 0, None)?;
-              self.cat_pattern_(subcmt, &subpat, op)?;
+            self.cat_pattern_(subcmt, &subpat, op)?;
           }
 
           PatKind::Vec(ref before, ref slice, ref after) => {
-              let context = InteriorOffsetKind::Pattern;
-              let elt_cmt = self.cat_index(pat, cmt, context)?;
-              for before_pat in before {
-                  self.cat_pattern_(elt_cmt.clone(), &before_pat, op)?;
-              }
-              if let Some(ref slice_pat) = *slice {
-                  self.cat_pattern_(elt_cmt.clone(), &slice_pat, op)?;
-              }
-              for after_pat in after {
-                  self.cat_pattern_(elt_cmt.clone(), &after_pat, op)?;
-              }
+            let context = InteriorOffsetKind::Pattern;
+            let elt_cmt = self.cat_index(pat, cmt, context)?;
+            for before_pat in before {
+                self.cat_pattern_(elt_cmt.clone(), &before_pat, op)?;
+            }
+            if let Some(ref slice_pat) = *slice {
+                self.cat_pattern_(elt_cmt.clone(), &slice_pat, op)?;
+            }
+            for after_pat in after {
+                self.cat_pattern_(elt_cmt.clone(), &after_pat, op)?;
+            }
           }
 
-          PatKind::Lit(_) | PatKind::Range(_, _) => {
-              /*always ok*/
+          PatKind::Path(..) | PatKind::Binding(_, _, None) |
+          PatKind::Lit(..) | PatKind::Range(..) | PatKind::Wild => {
+            // always ok
           }
         }
 
index 62d3421770c2f2cefe0a28ce6115ac5e006e21d1..93507246241de62bde905a68e9fde7acc47e7c47 100644 (file)
@@ -1063,7 +1063,7 @@ fn fmt_tuple(fmt: &mut Formatter, lvs: &[Operand]) -> fmt::Result {
                             Some(tcx.lookup_item_type(variant_def.did).generics)
                         })?;
 
-                        match variant_def.kind() {
+                        match variant_def.kind {
                             ty::VariantKind::Unit => Ok(()),
                             ty::VariantKind::Tuple => fmt_tuple(fmt, lvs),
                             ty::VariantKind::Struct => {
index a37990061920b6fa299a2de535010bb78a8490e7..5ccc96210be78d7d56cc8b739544ec05a1fce18e 100644 (file)
@@ -197,23 +197,70 @@ pub struct OutputFilenames {
     pub outputs: HashMap<OutputType, Option<PathBuf>>,
 }
 
+/// Codegen unit names generated by the numbered naming scheme will contain this
+/// marker right before the index of the codegen unit.
+pub const NUMBERED_CODEGEN_UNIT_MARKER: &'static str = ".cgu-";
+
 impl OutputFilenames {
     pub fn path(&self, flavor: OutputType) -> PathBuf {
         self.outputs.get(&flavor).and_then(|p| p.to_owned())
             .or_else(|| self.single_output_file.clone())
-            .unwrap_or_else(|| self.temp_path(flavor))
+            .unwrap_or_else(|| self.temp_path(flavor, None))
     }
 
-    pub fn temp_path(&self, flavor: OutputType) -> PathBuf {
+    /// Get the path where a compilation artifact of the given type for the
+    /// given codegen unit should be placed on disk. If codegen_unit_name is
+    /// None, a path distinct from those of any codegen unit will be generated.
+    pub fn temp_path(&self,
+                     flavor: OutputType,
+                     codegen_unit_name: Option<&str>)
+                     -> PathBuf {
+        let extension = match flavor {
+            OutputType::Bitcode => "bc",
+            OutputType::Assembly => "s",
+            OutputType::LlvmAssembly => "ll",
+            OutputType::Object => "o",
+            OutputType::DepInfo => "d",
+            OutputType::Exe => "",
+        };
+
+        self.temp_path_ext(extension, codegen_unit_name)
+    }
+
+    /// Like temp_path, but also supports things where there is no corresponding
+    /// OutputType, like no-opt-bitcode or lto-bitcode.
+    pub fn temp_path_ext(&self,
+                         ext: &str,
+                         codegen_unit_name: Option<&str>)
+                         -> PathBuf {
         let base = self.out_directory.join(&self.filestem());
-        match flavor {
-            OutputType::Bitcode => base.with_extension("bc"),
-            OutputType::Assembly => base.with_extension("s"),
-            OutputType::LlvmAssembly => base.with_extension("ll"),
-            OutputType::Object => base.with_extension("o"),
-            OutputType::DepInfo => base.with_extension("d"),
-            OutputType::Exe => base,
+
+        let mut extension = String::new();
+
+        if let Some(codegen_unit_name) = codegen_unit_name {
+            if codegen_unit_name.contains(NUMBERED_CODEGEN_UNIT_MARKER) {
+                // If we use the numbered naming scheme for modules, we don't want
+                // the files to look like <crate-name><extra>.<crate-name>.<index>.<ext>
+                // but simply <crate-name><extra>.<index>.<ext>
+                let marker_offset = codegen_unit_name.rfind(NUMBERED_CODEGEN_UNIT_MARKER)
+                                                     .unwrap();
+                let index_offset = marker_offset + NUMBERED_CODEGEN_UNIT_MARKER.len();
+                extension.push_str(&codegen_unit_name[index_offset .. ]);
+            } else {
+                extension.push_str(codegen_unit_name);
+            };
+        }
+
+        if !ext.is_empty() {
+            if !extension.is_empty() {
+                extension.push_str(".");
+            }
+
+            extension.push_str(ext);
         }
+
+        let path = base.with_extension(&extension[..]);
+        path
     }
 
     pub fn with_extension(&self, extension: &str) -> PathBuf {
index 219cb5e383a8d00ebc3b8dc270ca48bcc1e6a593..4c8fa80dd0b9669acff4ea2d9c13422681943b10 100644 (file)
@@ -591,6 +591,13 @@ pub fn alloc_trait_def(self, def: ty::TraitDef<'gcx>)
         self.global_interners.arenas.trait_defs.alloc(def)
     }
 
+    pub fn insert_adt_def(self, did: DefId, adt_def: ty::AdtDefMaster<'gcx>) {
+        // this will need a transmute when reverse-variance is removed
+        if let Some(prev) = self.adt_defs.borrow_mut().insert(did, adt_def) {
+            bug!("Tried to overwrite interned AdtDef: {:?}", prev)
+        }
+    }
+
     pub fn intern_adt_def(self,
                           did: DefId,
                           kind: ty::AdtKind,
@@ -598,10 +605,7 @@ pub fn intern_adt_def(self,
                           -> ty::AdtDefMaster<'gcx> {
         let def = ty::AdtDefData::new(self, did, kind, variants);
         let interned = self.global_interners.arenas.adt_defs.alloc(def);
-        // this will need a transmute when reverse-variance is removed
-        if let Some(prev) = self.adt_defs.borrow_mut().insert(did, interned) {
-            bug!("Tried to overwrite interned AdtDef: {:?}", prev)
-        }
+        self.insert_adt_def(did, interned);
         interned
     }
 
index 14db922d298101e82c941202deb7834743b9f4f4..03e893727d1b58c7d88142ee72bb336dfe509cfd 100644 (file)
@@ -1715,7 +1715,7 @@ pub fn variant_index_with_id(&self, vid: DefId) -> usize {
     pub fn variant_of_def(&self, def: Def) -> &VariantDefData<'gcx, 'container> {
         match def {
             Def::Variant(_, vid) => self.variant_with_id(vid),
-            Def::Struct(..) | Def::TyAlias(..) => self.struct_variant(),
+            Def::Struct(..) | Def::TyAlias(..) | Def::AssociatedTy(..) => self.struct_variant(),
             _ => bug!("unexpected def {:?} in variant_of_def", def)
         }
     }
@@ -1925,14 +1925,6 @@ fn fields_iter(&self) -> slice::Iter<FieldDefData<'tcx, 'container>> {
         self.fields.iter()
     }
 
-    pub fn kind(&self) -> VariantKind {
-        self.kind
-    }
-
-    pub fn is_tuple_struct(&self) -> bool {
-        self.kind() == VariantKind::Tuple
-    }
-
     #[inline]
     pub fn find_field_named(&self,
                             name: ast::Name)
@@ -2454,6 +2446,20 @@ pub fn expect_def_or_none(self, id: NodeId) -> Option<Def> {
         self.def_map.borrow().get(&id).map(|resolution| resolution.full_def())
     }
 
+    // Returns `ty::VariantDef` if `def` refers to a struct,
+    // or variant or their constructors, panics otherwise.
+    pub fn expect_variant_def(self, def: Def) -> VariantDef<'tcx> {
+        match def {
+            Def::Variant(enum_did, did) => {
+                self.lookup_adt_def(enum_did).variant_with_id(did)
+            }
+            Def::Struct(did) => {
+                self.lookup_adt_def(did).struct_variant()
+            }
+            _ => bug!("expect_variant_def used with unexpected def {:?}", def)
+        }
+    }
+
     pub fn def_key(self, id: DefId) -> ast_map::DefKey {
         if id.is_local() {
             self.map.def_key(id)
index a5a9dea61ad7c99661851be39303634677eb4198..866a91b4d95100d81bbee6b850795114db49d77d 100644 (file)
@@ -246,9 +246,9 @@ fn check_for_bindings_named_the_same_as_variants(cx: &MatchCheckCtxt, pat: &Pat)
             let pat_ty = cx.tcx.pat_ty(p);
             if let ty::TyEnum(edef, _) = pat_ty.sty {
                 if let Def::Local(..) = cx.tcx.expect_def(p.id) {
-                    if edef.variants.iter().any(|variant|
-                        variant.name == name.node && variant.kind() == VariantKind::Unit
-                    ) {
+                    if edef.variants.iter().any(|variant| {
+                        variant.name == name.node && variant.kind == VariantKind::Unit
+                    }) {
                         let ty_path = cx.tcx.item_path_str(edef.did);
                         let mut err = struct_span_warn!(cx.tcx.sess, p.span, E0170,
                             "pattern binding `{}` is named the same as one \
@@ -489,7 +489,7 @@ fn visit_id(&mut self, node_id: NodeId) {
 impl<'a, 'tcx> Folder for StaticInliner<'a, 'tcx> {
     fn fold_pat(&mut self, pat: P<Pat>) -> P<Pat> {
         return match pat.node {
-            PatKind::Path(..) | PatKind::QPath(..) => {
+            PatKind::Path(..) => {
                 match self.tcx.expect_def(pat.id) {
                     Def::AssociatedConst(did) | Def::Const(did) => {
                         let substs = Some(self.tcx.node_id_item_substs(pat.id).substs);
@@ -563,7 +563,7 @@ fn construct_witness<'a,'tcx>(cx: &MatchCheckCtxt<'a,'tcx>, ctor: &Constructor,
 
         ty::TyEnum(adt, _) | ty::TyStruct(adt, _)  => {
             let v = ctor.variant_for_adt(adt);
-            match v.kind() {
+            match v.kind {
                 VariantKind::Struct => {
                     let field_pats: hir::HirVec<_> = v.fields.iter()
                         .zip(pats)
@@ -583,7 +583,7 @@ fn construct_witness<'a,'tcx>(cx: &MatchCheckCtxt<'a,'tcx>, ctor: &Constructor,
                     PatKind::TupleStruct(def_to_path(cx.tcx, v.did), pats.collect(), None)
                 }
                 VariantKind::Unit => {
-                    PatKind::Path(def_to_path(cx.tcx, v.did))
+                    PatKind::Path(None, def_to_path(cx.tcx, v.did))
                 }
             }
         }
@@ -786,16 +786,12 @@ fn pat_constructors(cx: &MatchCheckCtxt, p: &Pat,
     match pat.node {
         PatKind::Struct(..) | PatKind::TupleStruct(..) | PatKind::Path(..) =>
             match cx.tcx.expect_def(pat.id) {
-                Def::Const(..) | Def::AssociatedConst(..) =>
-                    span_bug!(pat.span, "const pattern should've \
-                                         been rewritten"),
-                Def::Struct(..) | Def::TyAlias(..) => vec![Single],
                 Def::Variant(_, id) => vec![Variant(id)],
-                def => span_bug!(pat.span, "pat_constructors: unexpected \
-                                            definition {:?}", def),
+                Def::Struct(..) | Def::TyAlias(..) | Def::AssociatedTy(..) => vec![Single],
+                Def::Const(..) | Def::AssociatedConst(..) =>
+                    span_bug!(pat.span, "const pattern should've been rewritten"),
+                def => span_bug!(pat.span, "pat_constructors: unexpected definition {:?}", def),
             },
-        PatKind::QPath(..) =>
-            span_bug!(pat.span, "const pattern should've been rewritten"),
         PatKind::Lit(ref expr) =>
             vec![ConstantValue(eval_const_expr(cx.tcx, &expr))],
         PatKind::Range(ref lo, ref hi) =>
@@ -934,10 +930,6 @@ pub fn specialize<'a, 'b, 'tcx>(
             }
         }
 
-        PatKind::QPath(_, _) => {
-            span_bug!(pat_span, "const pattern should've been rewritten")
-        }
-
         PatKind::Struct(_, ref pattern_fields, _) => {
             let adt = cx.tcx.node_id_to_type(pat_id).ty_adt_def().unwrap();
             let variant = constructor.variant_for_adt(adt);
index 6c37662206ce252f5592eeeee86853fe43c4b1e6..a3c707e82a0ff45e45be5c74e951c184296f8cb5 100644 (file)
@@ -323,7 +323,7 @@ pub fn const_expr_to_pat<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
 
         hir::ExprPath(_, ref path) => {
             match tcx.expect_def(expr.id) {
-                Def::Struct(..) | Def::Variant(..) => PatKind::Path(path.clone()),
+                Def::Struct(..) | Def::Variant(..) => PatKind::Path(None, path.clone()),
                 Def::Const(def_id) | Def::AssociatedConst(def_id) => {
                     let substs = Some(tcx.node_id_item_substs(expr.id).substs);
                     let (expr, _ty) = lookup_const_by_id(tcx, def_id, substs).unwrap();
index 277789f5312eee0dd9ce8c73e133714ec6d0832a..eef2b6e6f37b412c0df669583a8ea4bc920e88e5 100644 (file)
@@ -1081,7 +1081,7 @@ pub fn phase_5_run_llvm_passes(sess: &Session,
 
         // Remove assembly source, unless --save-temps was specified
         if !sess.opts.cg.save_temps {
-            fs::remove_file(&outputs.temp_path(OutputType::Assembly)).unwrap();
+            fs::remove_file(&outputs.temp_path(OutputType::Assembly, None)).unwrap();
         }
     } else {
         time(sess.time_passes(),
index 7e9b6f561b9846f1d0b4f09c72b31918ebd63a64..15914838acf0db85289b97ececd3fa5c90696a4b 100644 (file)
@@ -360,7 +360,7 @@ fn check_impl_item(&mut self, cx: &LateContext, ii: &hir::ImplItem) {
 
     fn check_pat(&mut self, cx: &LateContext, p: &hir::Pat) {
         // Lint for constants that look like binding identifiers (#7526)
-        if let PatKind::Path(ref path) = p.node {
+        if let PatKind::Path(None, ref path) = p.node {
             if !path.global && path.segments.len() == 1 && path.segments[0].parameters.is_empty() {
                 if let Def::Const(..) = cx.tcx.expect_def(p.id) {
                     NonUpperCaseGlobals::check_upper_case(cx, "constant in pattern",
index eada2a9cd7a63332f6b30aeb10712a785d047f3e..6d3699e978794f8e3bd4ddb078382d2135e177c4 100644 (file)
@@ -471,28 +471,34 @@ fn get_struct_variant<'tcx>(intr: &IdentInterner,
 
     let doc = cdata.lookup_item(item_id);
     let did = DefId { krate: cdata.cnum, index: item_id };
+    let mut ctor_did = None;
     let (kind, variants) = match item_family(doc) {
         Enum => {
             (ty::AdtKind::Enum,
              get_enum_variants(intr, cdata, doc))
         }
         Struct(..) => {
-            let ctor_did =
-                reader::maybe_get_doc(doc, tag_items_data_item_struct_ctor).
-                map_or(did, |ctor_doc| translated_def_id(cdata, ctor_doc));
+            // Use separate constructor id for unit/tuple structs and reuse did for braced structs.
+            ctor_did = reader::maybe_get_doc(doc, tag_items_data_item_struct_ctor).map(|ctor_doc| {
+                translated_def_id(cdata, ctor_doc)
+            });
             (ty::AdtKind::Struct,
-             vec![get_struct_variant(intr, cdata, doc, ctor_did)])
+             vec![get_struct_variant(intr, cdata, doc, ctor_did.unwrap_or(did))])
         }
         _ => bug!("get_adt_def called on a non-ADT {:?} - {:?}",
                   item_family(doc), did)
     };
 
     let adt = tcx.intern_adt_def(did, kind, variants);
+    if let Some(ctor_did) = ctor_did {
+        // Make adt definition available through constructor id as well.
+        tcx.insert_adt_def(ctor_did, adt);
+    }
 
     // this needs to be done *after* the variant is interned,
     // to support recursive structures
     for variant in &adt.variants {
-        if variant.kind() == ty::VariantKind::Tuple &&
+        if variant.kind == ty::VariantKind::Tuple &&
             adt.adt_kind() == ty::AdtKind::Enum {
             // tuple-like enum variant fields aren't real items - get the types
             // from the ctor.
index b6f49569958d68189effffc864b47396c1029410..7314259423592985f68cd4cd512a17247d7d2fd5 100644 (file)
@@ -217,7 +217,7 @@ fn encode_parent_item(rbml_w: &mut Encoder, id: DefId) {
 fn encode_struct_fields(rbml_w: &mut Encoder,
                         variant: ty::VariantDef) {
     for f in &variant.fields {
-        if variant.is_tuple_struct() {
+        if variant.kind == ty::VariantKind::Tuple {
             rbml_w.start_tag(tag_item_unnamed_field);
         } else {
             rbml_w.start_tag(tag_item_field);
@@ -250,7 +250,7 @@ fn encode_enum_variant_info<'a, 'tcx>(ecx: &EncodeContext<'a, 'tcx>,
         let _task = index.record(vid, rbml_w);
         rbml_w.start_tag(tag_items_data_item);
         encode_def_id_and_key(ecx, rbml_w, vid);
-        encode_family(rbml_w, match variant.kind() {
+        encode_family(rbml_w, match variant.kind {
             ty::VariantKind::Struct => 'V',
             ty::VariantKind::Tuple => 'v',
             ty::VariantKind::Unit => 'w',
index b5da50792762f17f33e4202447b0a05f44e143d6..c54c8bfb5981ed6d8469510cc76c47a212992f36 100644 (file)
@@ -13,7 +13,7 @@
 use rustc_data_structures::indexed_vec::Idx;
 use rustc_const_eval as const_eval;
 use rustc::hir::def::Def;
-use rustc::hir::pat_util::{EnumerateAndAdjustIterator, pat_is_resolved_const};
+use rustc::hir::pat_util::EnumerateAndAdjustIterator;
 use rustc::ty::{self, Ty};
 use rustc::mir::repr::*;
 use rustc::hir::{self, PatKind};
@@ -76,9 +76,7 @@ fn to_pattern(&mut self, pat: &hir::Pat) -> Pattern<'tcx> {
                 PatternKind::Range { lo: lo, hi: hi }
             },
 
-            PatKind::Path(..) | PatKind::QPath(..)
-                if pat_is_resolved_const(&self.cx.tcx.def_map.borrow(), pat) =>
-            {
+            PatKind::Path(..) => {
                 match self.cx.tcx.expect_def(pat.id) {
                     Def::Const(def_id) | Def::AssociatedConst(def_id) => {
                         let tcx = self.cx.tcx.global_tcx();
@@ -104,11 +102,9 @@ fn to_pattern(&mut self, pat: &hir::Pat) -> Pattern<'tcx> {
                             }
                         }
                     }
-                    def =>
-                        span_bug!(
-                            pat.span,
-                            "def not a constant: {:?}",
-                            def),
+                    _ => {
+                        self.variant_or_leaf(pat, vec![])
+                    }
                 }
             }
 
@@ -199,10 +195,6 @@ fn to_pattern(&mut self, pat: &hir::Pat) -> Pattern<'tcx> {
                 }
             }
 
-            PatKind::Path(..) => {
-                self.variant_or_leaf(pat, vec![])
-            }
-
             PatKind::TupleStruct(_, ref subpatterns, ddpos) => {
                 let pat_ty = self.cx.tcx.node_id_to_type(pat.id);
                 let adt_def = match pat_ty.sty {
@@ -253,10 +245,6 @@ fn to_pattern(&mut self, pat: &hir::Pat) -> Pattern<'tcx> {
 
                 self.variant_or_leaf(pat, subpatterns)
             }
-
-            PatKind::QPath(..) => {
-                span_bug!(pat.span, "unexpanded macro or bad constant etc");
-            }
         };
 
         Pattern {
@@ -325,7 +313,7 @@ fn variant_or_leaf(&mut self,
                 }
             }
 
-            Def::Struct(..) | Def::TyAlias(..) => {
+            Def::Struct(..) | Def::TyAlias(..) | Def::AssociatedTy(..) => {
                 PatternKind::Leaf { subpatterns: subpatterns }
             }
 
index 85a6f732dd52e71716efd93395dbcd210f028a99..acaf9b9b2faeed9d57b1b4da9a0cccd4b3b26238 100644 (file)
@@ -436,7 +436,6 @@ fn visit_expr(&mut self, expr: &hir::Expr) {
                 }
             }
             hir::ExprPath(..) => {
-
                 if let Def::Struct(..) = self.tcx.expect_def(expr.id) {
                     let expr_ty = self.tcx.expr_ty(expr);
                     let def = match expr_ty.sty {
index 66b0d663424aa829a95c673de86b76e27b63ae8e..9079cc8ccb122b86ecf41654a760b6c0e595ef08 100644 (file)
@@ -980,7 +980,7 @@ pub struct Resolver<'a> {
     //
     // There will be an anonymous module created around `g` with the ID of the
     // entry block for `f`.
-    module_map: NodeMap<Module<'a>>,
+    pub module_map: NodeMap<Module<'a>>,
 
     // Whether or not to print error messages. Can be set to true
     // when getting additional info for error message suggestions,
@@ -2180,7 +2180,8 @@ fn fresh_binding(&mut self,
         // because that breaks the assumptions later
         // passes make about or-patterns.)
         let renamed = mtwt::resolve(ident.node);
-        let def = match bindings.get(&renamed).cloned() {
+        let mut def = Def::Local(self.definitions.local_def_id(pat_id), pat_id);
+        match bindings.get(&renamed).cloned() {
             Some(id) if id == outer_pat_id => {
                 // `Variant(a, a)`, error
                 resolve_error(
@@ -2189,7 +2190,6 @@ fn fresh_binding(&mut self,
                     ResolutionError::IdentifierBoundMoreThanOnceInSamePattern(
                         &ident.node.name.as_str())
                 );
-                Def::Err
             }
             Some(..) if pat_src == PatternSource::FnParam => {
                 // `fn f(a: u8, a: u8)`, error
@@ -2199,29 +2199,24 @@ fn fresh_binding(&mut self,
                     ResolutionError::IdentifierBoundMoreThanOnceInParameterList(
                         &ident.node.name.as_str())
                 );
-                Def::Err
             }
             Some(..) if pat_src == PatternSource::Match => {
                 // `Variant1(a) | Variant2(a)`, ok
                 // Reuse definition from the first `a`.
-                self.value_ribs.last_mut().unwrap().bindings[&renamed]
+                def = self.value_ribs.last_mut().unwrap().bindings[&renamed];
             }
             Some(..) => {
                 span_bug!(ident.span, "two bindings with the same name from \
                                        unexpected pattern source {:?}", pat_src);
             }
             None => {
-                // A completely fresh binding, add to the lists.
-                // FIXME: Later stages are not ready to deal with `Def::Err` here yet, so
-                // define `Invalid` bindings as `Def::Local`, just don't add them to the lists.
-                let def = Def::Local(self.definitions.local_def_id(pat_id), pat_id);
+                // A completely fresh binding, add to the lists if it's valid.
                 if ident.node.name != keywords::Invalid.name() {
                     bindings.insert(renamed, outer_pat_id);
                     self.value_ribs.last_mut().unwrap().bindings.insert(renamed, def);
                 }
-                def
             }
-        };
+        }
 
         PathResolution::new(def)
     }
@@ -2287,43 +2282,41 @@ fn resolve_pattern(&mut self,
                 PatKind::Ident(bmode, ref ident, ref opt_pat) => {
                     // First try to resolve the identifier as some existing
                     // entity, then fall back to a fresh binding.
-                    let local_def = self.resolve_identifier(ident.node, ValueNS, true);
-                    let resolution = if let Some(LocalDef { def, .. }) = local_def {
+                    let resolution = self.resolve_identifier(ident.node, ValueNS, true)
+                                         .map(|local_def| PathResolution::new(local_def.def))
+                                         .and_then(|resolution| {
                         let always_binding = !pat_src.is_refutable() || opt_pat.is_some() ||
                                              bmode != BindingMode::ByValue(Mutability::Immutable);
-                        match def {
+                        match resolution.base_def {
                             Def::Struct(..) | Def::Variant(..) |
                             Def::Const(..) | Def::AssociatedConst(..) if !always_binding => {
                                 // A constant, unit variant, etc pattern.
-                                PathResolution::new(def)
+                                Some(resolution)
                             }
                             Def::Struct(..) | Def::Variant(..) |
                             Def::Const(..) | Def::AssociatedConst(..) | Def::Static(..) => {
                                 // A fresh binding that shadows something unacceptable.
-                                let kind_name = PathResolution::new(def).kind_name();
                                 resolve_error(
                                     self,
                                     ident.span,
                                     ResolutionError::BindingShadowsSomethingUnacceptable(
-                                        pat_src.descr(), kind_name, ident.node.name)
+                                        pat_src.descr(), resolution.kind_name(), ident.node.name)
                                 );
-                                err_path_resolution()
+                                None
                             }
-                            Def::Local(..) | Def::Upvar(..) | Def::Fn(..) | Def::Err => {
+                            Def::Local(..) | Def::Upvar(..) | Def::Fn(..) => {
                                 // These entities are explicitly allowed
                                 // to be shadowed by fresh bindings.
-                                self.fresh_binding(ident, pat.id, outer_pat_id,
-                                                   pat_src, bindings)
+                                None
                             }
                             def => {
                                 span_bug!(ident.span, "unexpected definition for an \
                                                        identifier in pattern {:?}", def);
                             }
                         }
-                    } else {
-                        // Fall back to a fresh binding.
+                    }).unwrap_or_else(|| {
                         self.fresh_binding(ident, pat.id, outer_pat_id, pat_src, bindings)
-                    };
+                    });
 
                     self.record_def(pat.id, resolution);
                 }
@@ -2331,7 +2324,7 @@ fn resolve_pattern(&mut self,
                 PatKind::TupleStruct(ref path, _, _) => {
                     self.resolve_pattern_path(pat.id, None, path, ValueNS, |def| {
                         match def {
-                            Def::Struct(..) | Def::Variant(..) | Def::Err => true,
+                            Def::Struct(..) | Def::Variant(..) => true,
                             _ => false,
                         }
                     }, "variant or struct");
@@ -2341,7 +2334,7 @@ fn resolve_pattern(&mut self,
                     self.resolve_pattern_path(pat.id, qself.as_ref(), path, ValueNS, |def| {
                         match def {
                             Def::Struct(..) | Def::Variant(..) |
-                            Def::Const(..) | Def::AssociatedConst(..) | Def::Err => true,
+                            Def::Const(..) | Def::AssociatedConst(..) => true,
                             _ => false,
                         }
                     }, "variant, struct or constant");
@@ -2351,7 +2344,7 @@ fn resolve_pattern(&mut self,
                     self.resolve_pattern_path(pat.id, None, path, TypeNS, |def| {
                         match def {
                             Def::Struct(..) | Def::Variant(..) |
-                            Def::TyAlias(..) | Def::AssociatedTy(..) | Def::Err => true,
+                            Def::TyAlias(..) | Def::AssociatedTy(..) => true,
                             _ => false,
                         }
                     }, "variant, struct or type alias");
@@ -2482,7 +2475,7 @@ fn resolve_identifier(&mut self,
                           record_used: bool)
                           -> Option<LocalDef> {
         if identifier.name == keywords::Invalid.name() {
-            return Some(LocalDef::from_def(Def::Err));
+            return None;
         }
 
         self.resolve_ident_in_lexical_scope(identifier, namespace, record_used)
@@ -2674,6 +2667,34 @@ fn with_no_errors<T, F>(&mut self, f: F) -> T
         rs
     }
 
+    // Calls `f` with a `Resolver` whose current lexical scope is `module`'s lexical scope,
+    // i.e. the module's items and the prelude (unless the module is `#[no_implicit_prelude]`).
+    // FIXME #34673: This needs testing.
+    pub fn with_module_lexical_scope<T, F>(&mut self, module: Module<'a>, f: F) -> T
+        where F: FnOnce(&mut Resolver<'a>) -> T,
+    {
+        self.with_empty_ribs(|this| {
+            this.value_ribs.push(Rib::new(ModuleRibKind(module)));
+            this.type_ribs.push(Rib::new(ModuleRibKind(module)));
+            f(this)
+        })
+    }
+
+    fn with_empty_ribs<T, F>(&mut self, f: F) -> T
+        where F: FnOnce(&mut Resolver<'a>) -> T,
+    {
+        use ::std::mem::replace;
+        let value_ribs = replace(&mut self.value_ribs, Vec::new());
+        let type_ribs = replace(&mut self.type_ribs, Vec::new());
+        let label_ribs = replace(&mut self.label_ribs, Vec::new());
+
+        let result = f(self);
+        self.value_ribs = value_ribs;
+        self.type_ribs = type_ribs;
+        self.label_ribs = label_ribs;
+        result
+    }
+
     fn find_fallback_in_self_type(&mut self, name: Name) -> FallbackSuggestion {
         fn extract_node_id(t: &Ty) -> Option<NodeId> {
             match t.node {
@@ -2880,8 +2901,7 @@ fn resolve_expr(&mut self, expr: &Expr, parent: Option<&Expr>) {
                                 if !msg.is_empty() {
                                     msg = format!(". Did you mean {}?", msg);
                                 } else {
-                                    // we check if this a module and if so, we display a help
-                                    // message
+                                    // we display a help message if this is a module
                                     let name_path = path.segments.iter()
                                                         .map(|seg| seg.identifier.name)
                                                         .collect::<Vec<_>>();
index c1960eeee46b8fb7eccede5f0fda8f5f911e96b4..4ffb5477305493e1e366a73dd125a6ffde0b85eb 100644 (file)
@@ -29,6 +29,7 @@
 
 use rustc::hir::def::Def;
 use rustc::hir::def_id::DefId;
+use rustc::hir::map::Node;
 use rustc::session::Session;
 use rustc::ty::{self, TyCtxt, ImplOrTraitItem, ImplOrTraitItemContainer};
 
@@ -1299,7 +1300,14 @@ fn visit_expr(&mut self, ex: &ast::Expr) {
             ast::ExprKind::TupField(ref sub_ex, idx) => {
                 self.visit_expr(&sub_ex);
 
-                let hir_node = self.save_ctxt.tcx.map.expect_expr(sub_ex.id);
+                let hir_node = match self.save_ctxt.tcx.map.find(sub_ex.id) {
+                    Some(Node::NodeExpr(expr)) => expr,
+                    _ => {
+                        debug!("Missing or weird node for sub-expression {} in {:?}",
+                               sub_ex.id, ex);
+                        return;
+                    }
+                };
                 let ty = &self.tcx.expr_ty_adjusted(&hir_node).sty;
                 match *ty {
                     ty::TyStruct(def, _) => {
index 3ef6e29a6f83894da32612e3ba11b5fa4fcd780a..08e894ffbcfd48c4e37fde4d75f6565713c79f80 100644 (file)
@@ -796,7 +796,7 @@ fn any_irrefutable_adt_pat(tcx: TyCtxt, m: &[Match], col: usize) -> bool {
             PatKind::Tuple(..) => true,
             PatKind::Struct(..) | PatKind::TupleStruct(..) | PatKind::Path(..) => {
                 match tcx.expect_def(pat.id) {
-                    Def::Struct(..) | Def::TyAlias(..) => true,
+                    Def::Struct(..) | Def::TyAlias(..) | Def::AssociatedTy(..) => true,
                     _ => false,
                 }
             }
@@ -1495,20 +1495,27 @@ fn borrow(&mut self, _: ast::NodeId, _: Span, _: mc::cmt, _: ty::Region,
     fn decl_without_init(&mut self, _: ast::NodeId, _: Span) {}
 
     fn mutate(&mut self, _: ast::NodeId, _: Span, cmt: mc::cmt, _: euv::MutateMode) {
+        let cmt_id = |cmt: &mc::cmt| match cmt.cat {
+            Categorization::Upvar(mc::Upvar { id: ty::UpvarId { var_id: vid, ..}, ..}) |
+            Categorization::Local(vid) => Some(vid),
+            Categorization::Interior(ref base_cmt, mc::InteriorField(_)) => Some(base_cmt.id),
+            _ => None
+        };
         match cmt.cat {
             Categorization::Upvar(mc::Upvar { id: ty::UpvarId { var_id: vid, .. }, .. }) |
             Categorization::Local(vid) => self.reassigned |= self.node == vid,
-            Categorization::Interior(ref base_cmt, mc::InteriorField(field)) => {
-                match base_cmt.cat {
-                    Categorization::Upvar(mc::Upvar { id: ty::UpvarId { var_id: vid, .. }, .. }) |
-                    Categorization::Local(vid) => {
-                        self.reassigned |= self.node == vid &&
-                            (self.field.is_none() || Some(field) == self.field)
-                    },
-                    _ => {}
+            ref cat => {
+                let mut cat = cat;
+                while let &Categorization::Interior(ref base_cmt, mc::InteriorField(field)) = cat {
+                    if let Some(vid) = cmt_id(base_cmt) {
+                        if self.node == vid && (self.field.is_none() || self.field == Some(field)) {
+                            self.reassigned = true;
+                            return;
+                        }
+                    }
+                    cat = &base_cmt.cat;
                 }
-            },
-            _ => {}
+            }
         }
     }
 }
@@ -1996,7 +2003,7 @@ pub fn bind_irrefutable_pat<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                         cleanup_scope)
                 });
         }
-        PatKind::Path(..) | PatKind::QPath(..) | PatKind::Wild |
+        PatKind::Path(..) | PatKind::Wild |
         PatKind::Lit(..) | PatKind::Range(..) => ()
     }
     return bcx;
index df3d2d149b99ce072fbbf3ba13a1755fafa67455..6c2a09f8060c5a4b36fb87e1701d74bff6f62ed7 100644 (file)
@@ -229,6 +229,7 @@ pub fn store_fn_arg(&self, bcx: &BlockAndBuilder, idx: &mut usize, dst: ValueRef
 ///
 /// I will do my best to describe this structure, but these
 /// comments are reverse-engineered and may be inaccurate. -NDM
+#[derive(Clone)]
 pub struct FnType {
     /// The LLVM types of each argument.
     pub args: Vec<ArgType>,
index 744712b22b060ef3cafafef22dc282fb8329daa2..a9f3d2f8a175485b954c016e6d2e207a7c4084ff 100644 (file)
@@ -205,7 +205,7 @@ pub fn link_binary(sess: &Session,
 
     // Remove the temporary object file and metadata if we aren't saving temps
     if !sess.opts.cg.save_temps {
-        for obj in object_filenames(sess, outputs) {
+        for obj in object_filenames(trans, outputs) {
             remove(sess, &obj);
         }
         remove(sess, &outputs.with_extension("metadata.o"));
@@ -316,7 +316,7 @@ fn link_binary_output(sess: &Session,
                       crate_type: config::CrateType,
                       outputs: &OutputFilenames,
                       crate_name: &str) -> PathBuf {
-    let objects = object_filenames(sess, outputs);
+    let objects = object_filenames(trans, outputs);
     let default_filename = filename_for_input(sess, crate_type, crate_name,
                                               outputs);
     let out_filename = outputs.outputs.get(&OutputType::Exe)
@@ -356,10 +356,11 @@ fn link_binary_output(sess: &Session,
     out_filename
 }
 
-fn object_filenames(sess: &Session, outputs: &OutputFilenames) -> Vec<PathBuf> {
-    (0..sess.opts.cg.codegen_units).map(|i| {
-        let ext = format!("{}.o", i);
-        outputs.temp_path(OutputType::Object).with_extension(&ext)
+fn object_filenames(trans: &CrateTranslation,
+                    outputs: &OutputFilenames)
+                    -> Vec<PathBuf> {
+    trans.modules.iter().map(|module| {
+        outputs.temp_path(OutputType::Object, Some(&module.name[..]))
     }).collect()
 }
 
@@ -497,7 +498,7 @@ fn link_rlib<'a>(sess: &'a Session,
                 ab.add_file(&bc_deflated_filename);
 
                 // See the bottom of back::write::run_passes for an explanation
-                // of when we do and don't keep .0.bc files around.
+                // of when we do and don't keep .#module-name#.bc files around.
                 let user_wants_numbered_bitcode =
                         sess.opts.output_types.contains_key(&OutputType::Bitcode) &&
                         sess.opts.cg.codegen_units > 1;
index 31bc11fb215b08fa4298ee84964058d4d07cd06a..69e4a50804fadc5ca4c7a5f917ff4452cf3e9987 100644 (file)
 use flate;
 
 use std::ffi::CString;
+use std::path::Path;
 
 pub fn run(sess: &session::Session, llmod: ModuleRef,
            tm: TargetMachineRef, reachable: &[String],
            config: &ModuleConfig,
-           name_extra: &str,
-           output_names: &config::OutputFilenames) {
+           temp_no_opt_bc_filename: &Path) {
     if sess.opts.cg.prefer_dynamic {
         sess.struct_err("cannot prefer dynamic linking when performing LTO")
             .note("only 'staticlib', 'bin', and 'cdylib' outputs are \
@@ -132,8 +132,7 @@ pub fn run(sess: &session::Session, llmod: ModuleRef,
     }
 
     if sess.opts.cg.save_temps {
-        let path = output_names.with_extension(&format!("{}.no-opt.lto.bc", name_extra));
-        let cstr = path2cstr(&path);
+        let cstr = path2cstr(temp_no_opt_bc_filename);
         unsafe {
             llvm::LLVMWriteBitcodeToFile(llmod, cstr.as_ptr());
         }
index 170c8f75b5056c5ab9054bd7562623a197fabea4..ebb6e0baf20a00e8c837a18249c5e9853e706caf 100644 (file)
@@ -304,6 +304,19 @@ fn push(&mut self, text: &str) {
     }
 }
 
+pub fn exported_name_from_type_and_prefix<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
+                                                    t: ty::Ty<'tcx>,
+                                                    prefix: &str)
+                                                    -> String {
+    let empty_def_path = DefPath {
+        data: vec![],
+        krate: cstore::LOCAL_CRATE,
+    };
+    let hash = get_symbol_hash(scx, &empty_def_path, t, &[]);
+    let path = [token::intern_and_get_ident(prefix)];
+    mangle(path.iter().cloned(), Some(&hash[..]))
+}
+
 /// Only symbols that are invisible outside their compilation unit should use a
 /// name generated by this function.
 pub fn internal_name_from_type_and_suffix<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
index ec20381d1890d1f55f5d6e543104503ac4542ae5..071960f1944cfe941a62467a88f9872b7354e2f0 100644 (file)
@@ -423,9 +423,9 @@ struct HandlerFreeVars<'a> {
 unsafe fn optimize_and_codegen(cgcx: &CodegenContext,
                                mtrans: ModuleTranslation,
                                config: ModuleConfig,
-                               name_extra: String,
                                output_names: OutputFilenames) {
-    let ModuleTranslation { llmod, llcx } = mtrans;
+    let llmod = mtrans.llmod;
+    let llcx = mtrans.llcx;
     let tm = config.tm;
 
     // llcx doesn't outlive this function, so we can put this on the stack.
@@ -438,9 +438,10 @@ unsafe fn optimize_and_codegen(cgcx: &CodegenContext,
     llvm::LLVMSetInlineAsmDiagnosticHandler(llcx, inline_asm_handler, fv);
     llvm::LLVMContextSetDiagnosticHandler(llcx, diagnostic_handler, fv);
 
+    let module_name = Some(&mtrans.name[..]);
+
     if config.emit_no_opt_bc {
-        let ext = format!("{}.no-opt.bc", name_extra);
-        let out = output_names.with_extension(&ext);
+        let out = output_names.temp_path_ext("no-opt.bc", module_name);
         let out = path2cstr(&out);
         llvm::LLVMWriteBitcodeToFile(llmod, out.as_ptr());
     }
@@ -512,13 +513,18 @@ unsafe fn optimize_and_codegen(cgcx: &CodegenContext,
 
         match cgcx.lto_ctxt {
             Some((sess, reachable)) if sess.lto() =>  {
-                time(sess.time_passes(), "all lto passes", ||
-                     lto::run(sess, llmod, tm, reachable, &config,
-                              &name_extra, &output_names));
-
+                time(sess.time_passes(), "all lto passes", || {
+                    let temp_no_opt_bc_filename =
+                        output_names.temp_path_ext("no-opt.lto.bc", module_name);
+                    lto::run(sess,
+                             llmod,
+                             tm,
+                             reachable,
+                             &config,
+                             &temp_no_opt_bc_filename);
+                });
                 if config.emit_lto_bc {
-                    let name = format!("{}.lto.bc", name_extra);
-                    let out = output_names.with_extension(&name);
+                    let out = output_names.temp_path_ext("lto.bc", module_name);
                     let out = path2cstr(&out);
                     llvm::LLVMWriteBitcodeToFile(llmod, out.as_ptr());
                 }
@@ -556,8 +562,8 @@ unsafe fn with_codegen<F>(tm: TargetMachineRef,
     let write_obj = config.emit_obj && !config.obj_is_bitcode;
     let copy_bc_to_obj = config.emit_obj && config.obj_is_bitcode;
 
-    let bc_out = output_names.with_extension(&format!("{}.bc", name_extra));
-    let obj_out = output_names.with_extension(&format!("{}.o", name_extra));
+    let bc_out = output_names.temp_path(OutputType::Bitcode, module_name);
+    let obj_out = output_names.temp_path(OutputType::Object, module_name);
 
     if write_bc {
         let bc_out_c = path2cstr(&bc_out);
@@ -566,8 +572,7 @@ unsafe fn with_codegen<F>(tm: TargetMachineRef,
 
     time(config.time_passes, &format!("codegen passes [{}]", cgcx.worker), || {
         if config.emit_ir {
-            let ext = format!("{}.ll", name_extra);
-            let out = output_names.with_extension(&ext);
+            let out = output_names.temp_path(OutputType::LlvmAssembly, module_name);
             let out = path2cstr(&out);
             with_codegen(tm, llmod, config.no_builtins, |cpm| {
                 llvm::LLVMRustPrintModule(cpm, llmod, out.as_ptr());
@@ -576,7 +581,7 @@ unsafe fn with_codegen<F>(tm: TargetMachineRef,
         }
 
         if config.emit_asm {
-            let path = output_names.with_extension(&format!("{}.s", name_extra));
+            let path = output_names.temp_path(OutputType::Assembly, module_name);
 
             // We can't use the same module for asm and binary output, because that triggers
             // various errors like invalid IR or broken binaries, so we might have to clone the
@@ -713,27 +718,29 @@ pub fn run_passes(sess: &Session,
 
     {
         let work = build_work_item(sess,
-                                   trans.metadata_module,
+                                   trans.metadata_module.clone(),
                                    metadata_config.clone(),
-                                   crate_output.clone(),
-                                   "metadata".to_string());
+                                   crate_output.clone());
         work_items.push(work);
     }
 
-    for (index, mtrans) in trans.modules.iter().enumerate() {
+    for mtrans in trans.modules.iter() {
         let work = build_work_item(sess,
-                                   *mtrans,
+                                   mtrans.clone(),
                                    modules_config.clone(),
-                                   crate_output.clone(),
-                                   format!("{}", index));
+                                   crate_output.clone());
         work_items.push(work);
     }
 
     // Process the work items, optionally using worker threads.
-    if sess.opts.cg.codegen_units == 1 {
+    // NOTE: This code is not really adapted to incremental compilation where
+    //       the compiler decides the number of codegen units (and will
+    //       potentially create hundreds of them).
+    let num_workers = work_items.len() - 1;
+    if num_workers == 1 {
         run_work_singlethreaded(sess, &trans.reachable, work_items);
     } else {
-        run_work_multithreaded(sess, work_items, sess.opts.cg.codegen_units);
+        run_work_multithreaded(sess, work_items, num_workers);
     }
 
     // All codegen is finished.
@@ -748,32 +755,42 @@ pub fn run_passes(sess: &Session,
         }
     };
 
-    let copy_if_one_unit = |ext: &str,
-                            output_type: OutputType,
+    let copy_if_one_unit = |output_type: OutputType,
                             keep_numbered: bool| {
-        if sess.opts.cg.codegen_units == 1 {
+        if trans.modules.len() == 1 {
             // 1) Only one codegen unit.  In this case it's no difficulty
             //    to copy `foo.0.x` to `foo.x`.
-            copy_gracefully(&crate_output.with_extension(ext),
+            let module_name = Some(&(trans.modules[0].name)[..]);
+            let path = crate_output.temp_path(output_type, module_name);
+            copy_gracefully(&path,
                             &crate_output.path(output_type));
             if !sess.opts.cg.save_temps && !keep_numbered {
-                // The user just wants `foo.x`, not `foo.0.x`.
-                remove(sess, &crate_output.with_extension(ext));
+                // The user just wants `foo.x`, not `foo.#module-name#.x`.
+                remove(sess, &path);
             }
-        } else if crate_output.outputs.contains_key(&output_type) {
-            // 2) Multiple codegen units, with `--emit foo=some_name`.  We have
-            //    no good solution for this case, so warn the user.
-            sess.warn(&format!("ignoring emit path because multiple .{} files \
-                                were produced", ext));
-        } else if crate_output.single_output_file.is_some() {
-            // 3) Multiple codegen units, with `-o some_name`.  We have
-            //    no good solution for this case, so warn the user.
-            sess.warn(&format!("ignoring -o because multiple .{} files \
-                                were produced", ext));
         } else {
-            // 4) Multiple codegen units, but no explicit name.  We
-            //    just leave the `foo.0.x` files in place.
-            // (We don't have to do any work in this case.)
+            let ext = crate_output.temp_path(output_type, None)
+                                  .extension()
+                                  .unwrap()
+                                  .to_str()
+                                  .unwrap()
+                                  .to_owned();
+
+            if crate_output.outputs.contains_key(&output_type) {
+                // 2) Multiple codegen units, with `--emit foo=some_name`.  We have
+                //    no good solution for this case, so warn the user.
+                sess.warn(&format!("ignoring emit path because multiple .{} files \
+                                    were produced", ext));
+            } else if crate_output.single_output_file.is_some() {
+                // 3) Multiple codegen units, with `-o some_name`.  We have
+                //    no good solution for this case, so warn the user.
+                sess.warn(&format!("ignoring -o because multiple .{} files \
+                                    were produced", ext));
+            } else {
+                // 4) Multiple codegen units, but no explicit name.  We
+                //    just leave the `foo.0.x` files in place.
+                // (We don't have to do any work in this case.)
+            }
         }
     };
 
@@ -789,17 +806,17 @@ pub fn run_passes(sess: &Session,
                 // Copy to .bc, but always keep the .0.bc.  There is a later
                 // check to figure out if we should delete .0.bc files, or keep
                 // them for making an rlib.
-                copy_if_one_unit("0.bc", OutputType::Bitcode, true);
+                copy_if_one_unit(OutputType::Bitcode, true);
             }
             OutputType::LlvmAssembly => {
-                copy_if_one_unit("0.ll", OutputType::LlvmAssembly, false);
+                copy_if_one_unit(OutputType::LlvmAssembly, false);
             }
             OutputType::Assembly => {
-                copy_if_one_unit("0.s", OutputType::Assembly, false);
+                copy_if_one_unit(OutputType::Assembly, false);
             }
             OutputType::Object => {
                 user_wants_objects = true;
-                copy_if_one_unit("0.o", OutputType::Object, true);
+                copy_if_one_unit(OutputType::Object, true);
             }
             OutputType::Exe |
             OutputType::DepInfo => {}
@@ -810,51 +827,55 @@ pub fn run_passes(sess: &Session,
     // Clean up unwanted temporary files.
 
     // We create the following files by default:
-    //  - crate.0.bc
-    //  - crate.0.o
+    //  - crate.#module-name#.bc
+    //  - crate.#module-name#.o
     //  - crate.metadata.bc
     //  - crate.metadata.o
     //  - crate.o (linked from crate.##.o)
-    //  - crate.bc (copied from crate.0.bc)
+    //  - crate.bc (copied from crate.##.bc)
     // We may create additional files if requested by the user (through
     // `-C save-temps` or `--emit=` flags).
 
     if !sess.opts.cg.save_temps {
-        // Remove the temporary .0.o objects.  If the user didn't
+        // Remove the temporary .#module-name#.o objects.  If the user didn't
         // explicitly request bitcode (with --emit=bc), and the bitcode is not
-        // needed for building an rlib, then we must remove .0.bc as well.
+        // needed for building an rlib, then we must remove .#module-name#.bc as
+        // well.
 
-        // Specific rules for keeping .0.bc:
+        // Specific rules for keeping .#module-name#.bc:
         //  - If we're building an rlib (`needs_crate_bitcode`), then keep
         //    it.
         //  - If the user requested bitcode (`user_wants_bitcode`), and
         //    codegen_units > 1, then keep it.
         //  - If the user requested bitcode but codegen_units == 1, then we
-        //    can toss .0.bc because we copied it to .bc earlier.
+        //    can toss .#module-name#.bc because we copied it to .bc earlier.
         //  - If we're not building an rlib and the user didn't request
-        //    bitcode, then delete .0.bc.
+        //    bitcode, then delete .#module-name#.bc.
         // If you change how this works, also update back::link::link_rlib,
-        // where .0.bc files are (maybe) deleted after making an rlib.
+        // where .#module-name#.bc files are (maybe) deleted after making an
+        // rlib.
         let keep_numbered_bitcode = needs_crate_bitcode ||
                 (user_wants_bitcode && sess.opts.cg.codegen_units > 1);
 
         let keep_numbered_objects = needs_crate_object ||
                 (user_wants_objects && sess.opts.cg.codegen_units > 1);
 
-        for i in 0..trans.modules.len() {
+        for module_name in trans.modules.iter().map(|m| Some(&m.name[..])) {
             if modules_config.emit_obj && !keep_numbered_objects {
-                let ext = format!("{}.o", i);
-                remove(sess, &crate_output.with_extension(&ext));
+                let path = crate_output.temp_path(OutputType::Object, module_name);
+                remove(sess, &path);
             }
 
             if modules_config.emit_bc && !keep_numbered_bitcode {
-                let ext = format!("{}.bc", i);
-                remove(sess, &crate_output.with_extension(&ext));
+                let path = crate_output.temp_path(OutputType::Bitcode, module_name);
+                remove(sess, &path);
             }
         }
 
         if metadata_config.emit_bc && !user_wants_bitcode {
-            remove(sess, &crate_output.with_extension("metadata.bc"));
+            let path = crate_output.temp_path(OutputType::Bitcode,
+                                              Some(&trans.metadata_module.name[..]));
+            remove(sess, &path);
         }
     }
 
@@ -874,28 +895,31 @@ pub fn run_passes(sess: &Session,
 struct WorkItem {
     mtrans: ModuleTranslation,
     config: ModuleConfig,
-    output_names: OutputFilenames,
-    name_extra: String
+    output_names: OutputFilenames
 }
 
 fn build_work_item(sess: &Session,
                    mtrans: ModuleTranslation,
                    config: ModuleConfig,
-                   output_names: OutputFilenames,
-                   name_extra: String)
+                   output_names: OutputFilenames)
                    -> WorkItem
 {
     let mut config = config;
     config.tm = create_target_machine(sess);
-    WorkItem { mtrans: mtrans, config: config, output_names: output_names,
-               name_extra: name_extra }
+    WorkItem {
+        mtrans: mtrans,
+        config: config,
+        output_names: output_names
+    }
 }
 
 fn execute_work_item(cgcx: &CodegenContext,
                      work_item: WorkItem) {
     unsafe {
-        optimize_and_codegen(cgcx, work_item.mtrans, work_item.config,
-                             work_item.name_extra, work_item.output_names);
+        optimize_and_codegen(cgcx,
+                             work_item.mtrans,
+                             work_item.config,
+                             work_item.output_names);
     }
 }
 
@@ -914,6 +938,8 @@ fn run_work_singlethreaded(sess: &Session,
 fn run_work_multithreaded(sess: &Session,
                           work_items: Vec<WorkItem>,
                           num_workers: usize) {
+    assert!(num_workers > 0);
+
     // Run some workers to process the work items.
     let work_items_arc = Arc::new(Mutex::new(work_items));
     let mut diag_emitter = SharedEmitter::new();
@@ -981,7 +1007,7 @@ pub fn run_assembler(sess: &Session, outputs: &OutputFilenames) {
     let (pname, mut cmd, _) = get_linker(sess);
 
     cmd.arg("-c").arg("-o").arg(&outputs.path(OutputType::Object))
-                           .arg(&outputs.temp_path(OutputType::Assembly));
+                           .arg(&outputs.temp_path(OutputType::Assembly, None));
     debug!("{:?}", cmd);
 
     match cmd.output() {
index 7a572fdadc3d795bdac78a20a3bb3ea80449ed96..c080d1f06d00f2a0d9a5bdc1d2f0f817f059d546 100644 (file)
@@ -25,8 +25,6 @@
 
 #![allow(non_camel_case_types)]
 
-pub use self::ValueOrigin::*;
-
 use super::CrateTranslation;
 use super::ModuleTranslation;
 
@@ -60,7 +58,7 @@
 use cleanup::{self, CleanupMethods, DropHint};
 use closure;
 use common::{Block, C_bool, C_bytes_in_context, C_i32, C_int, C_uint, C_integral};
-use collector::{self, TransItemState, TransItemCollectionMode};
+use collector::{self, TransItemCollectionMode};
 use common::{C_null, C_struct_in_context, C_u64, C_u8, C_undef};
 use common::{CrateContext, DropFlagHintsMap, Field, FunctionContext};
 use common::{Result, NodeIdAndSpan, VariantInfo};
@@ -82,6 +80,7 @@
 use mir;
 use monomorphize::{self, Instance};
 use partitioning::{self, PartitioningStrategy, CodegenUnit};
+use symbol_map::SymbolMap;
 use symbol_names_test;
 use trans_item::TransItem;
 use tvec;
@@ -99,6 +98,7 @@
 use std::ffi::{CStr, CString};
 use std::cell::{Cell, RefCell};
 use std::collections::{HashMap, HashSet};
+use std::rc::Rc;
 use std::str;
 use std::{i8, i16, i32, i64};
 use syntax_pos::{Span, DUMMY_SP};
@@ -1407,19 +1407,17 @@ impl<'blk, 'tcx> FunctionContext<'blk, 'tcx> {
     pub fn new(ccx: &'blk CrateContext<'blk, 'tcx>,
                llfndecl: ValueRef,
                fn_ty: FnType,
-               definition: Option<(Instance<'tcx>, &ty::FnSig<'tcx>, Abi)>,
+               definition: Option<(Instance<'tcx>, &ty::FnSig<'tcx>, Abi, ast::NodeId)>,
                block_arena: &'blk TypedArena<common::BlockS<'blk, 'tcx>>)
                -> FunctionContext<'blk, 'tcx> {
-        let (param_substs, def_id) = match definition {
-            Some((instance, _, _)) => {
+        let (param_substs, def_id, inlined_id) = match definition {
+            Some((instance, _, _, inlined_id)) => {
                 common::validate_substs(instance.substs);
-                (instance.substs, Some(instance.def))
+                (instance.substs, Some(instance.def), Some(inlined_id))
             }
-            None => (ccx.tcx().mk_substs(Substs::empty()), None)
+            None => (ccx.tcx().mk_substs(Substs::empty()), None, None)
         };
 
-        let inlined_did = def_id.and_then(|def_id| inline::get_local_instance(ccx, def_id));
-        let inlined_id = inlined_did.and_then(|id| ccx.tcx().map.as_local_node_id(id));
         let local_id = def_id.and_then(|id| ccx.tcx().map.as_local_node_id(id));
 
         debug!("FunctionContext::new({})",
@@ -1454,7 +1452,7 @@ pub fn new(ccx: &'blk CrateContext<'blk, 'tcx>,
         };
 
         let debug_context = if let (false, Some(definition)) = (no_debug, definition) {
-            let (instance, sig, abi) = definition;
+            let (instance, sig, abi, _) = definition;
             debuginfo::create_function_debug_context(ccx, instance, sig, abi, llfndecl)
         } else {
             debuginfo::empty_function_debug_context(ccx)
@@ -1832,10 +1830,6 @@ pub fn trans_closure<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
                                closure_env: closure::ClosureEnv) {
     ccx.stats().n_closures.set(ccx.stats().n_closures.get() + 1);
 
-    if collector::collecting_debug_information(ccx.shared()) {
-        ccx.record_translation_item_as_generated(TransItem::Fn(instance));
-    }
-
     let _icx = push_ctxt("trans_closure");
     if !ccx.sess().no_landing_pads() {
         attributes::emit_uwtable(llfndecl, true);
@@ -1850,7 +1844,11 @@ pub fn trans_closure<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
 
     let (arena, fcx): (TypedArena<_>, FunctionContext);
     arena = TypedArena::new();
-    fcx = FunctionContext::new(ccx, llfndecl, fn_ty, Some((instance, sig, abi)), &arena);
+    fcx = FunctionContext::new(ccx,
+                               llfndecl,
+                               fn_ty,
+                               Some((instance, sig, abi, inlined_id)),
+                               &arena);
 
     if fcx.mir.is_some() {
         return mir::trans_mir(&fcx);
@@ -1916,35 +1914,47 @@ pub fn trans_closure<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
     fcx.finish(bcx, fn_cleanup_debug_loc.debug_loc());
 }
 
-/// Creates an LLVM function corresponding to a source language function.
-pub fn trans_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
-                          decl: &hir::FnDecl,
-                          body: &hir::Block,
-                          llfndecl: ValueRef,
-                          param_substs: &'tcx Substs<'tcx>,
-                          id: ast::NodeId) {
-    let _s = StatRecorder::new(ccx, ccx.tcx().node_path_str(id));
-    debug!("trans_fn(param_substs={:?})", param_substs);
-    let _icx = push_ctxt("trans_fn");
-    let def_id = if let Some(&def_id) = ccx.external_srcs().borrow().get(&id) {
-        def_id
-    } else {
-        ccx.tcx().map.local_def_id(id)
-    };
-    let fn_ty = ccx.tcx().lookup_item_type(def_id).ty;
-    let fn_ty = monomorphize::apply_param_substs(ccx.tcx(), param_substs, &fn_ty);
+pub fn trans_instance<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, instance: Instance<'tcx>) {
+    let instance = inline::maybe_inline_instance(ccx, instance);
+
+    let fn_node_id = ccx.tcx().map.as_local_node_id(instance.def).unwrap();
+
+    let _s = StatRecorder::new(ccx, ccx.tcx().node_path_str(fn_node_id));
+    debug!("trans_instance(instance={:?})", instance);
+    let _icx = push_ctxt("trans_instance");
+
+    let item = ccx.tcx().map.find(fn_node_id).unwrap();
+
+    let fn_ty = ccx.tcx().lookup_item_type(instance.def).ty;
+    let fn_ty = ccx.tcx().erase_regions(&fn_ty);
+    let fn_ty = monomorphize::apply_param_substs(ccx.tcx(), instance.substs, &fn_ty);
+
     let sig = ccx.tcx().erase_late_bound_regions(fn_ty.fn_sig());
     let sig = ccx.tcx().normalize_associated_type(&sig);
     let abi = fn_ty.fn_abi();
-    trans_closure(ccx,
-                  decl,
-                  body,
-                  llfndecl,
-                  Instance::new(def_id, param_substs),
-                  id,
-                  &sig,
-                  abi,
-                  closure::ClosureEnv::NotClosure);
+
+    let lldecl = match ccx.instances().borrow().get(&instance) {
+        Some(&val) => val,
+        None => bug!("Instance `{:?}` not already declared", instance)
+    };
+
+    match item {
+        hir_map::NodeItem(&hir::Item {
+            node: hir::ItemFn(ref decl, _, _, _, _, ref body), ..
+        }) |
+        hir_map::NodeTraitItem(&hir::TraitItem {
+            node: hir::MethodTraitItem(
+                hir::MethodSig { ref decl, .. }, Some(ref body)), ..
+        }) |
+        hir_map::NodeImplItem(&hir::ImplItem {
+            node: hir::ImplItemKind::Method(
+                hir::MethodSig { ref decl, .. }, ref body), ..
+        }) => {
+            trans_closure(ccx, decl, body, lldecl, instance,
+                          fn_node_id, &sig, abi, closure::ClosureEnv::NotClosure);
+        }
+        _ => bug!("Instance is a {:?}?", item)
+    }
 }
 
 pub fn trans_named_tuple_constructor<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
@@ -2170,86 +2180,10 @@ pub fn llvm_linkage_by_name(name: &str) -> Option<Linkage> {
     }
 }
 
-
-/// Enum describing the origin of an LLVM `Value`, for linkage purposes.
-#[derive(Copy, Clone)]
-pub enum ValueOrigin {
-    /// The LLVM `Value` is in this context because the corresponding item was
-    /// assigned to the current compilation unit.
-    OriginalTranslation,
-    /// The `Value`'s corresponding item was assigned to some other compilation
-    /// unit, but the `Value` was translated in this context anyway because the
-    /// item is marked `#[inline]`.
-    InlinedCopy,
-}
-
-/// Set the appropriate linkage for an LLVM `ValueRef` (function or global).
-/// If the `llval` is the direct translation of a specific Rust item, `id`
-/// should be set to the `NodeId` of that item.  (This mapping should be
-/// 1-to-1, so monomorphizations and drop/visit glue should have `id` set to
-/// `None`.)  `llval_origin` indicates whether `llval` is the translation of an
-/// item assigned to `ccx`'s compilation unit or an inlined copy of an item
-/// assigned to a different compilation unit.
-pub fn update_linkage(ccx: &CrateContext,
-                      llval: ValueRef,
-                      id: Option<ast::NodeId>,
-                      llval_origin: ValueOrigin) {
-    match llval_origin {
-        InlinedCopy => {
-            // `llval` is a translation of an item defined in a separate
-            // compilation unit.  This only makes sense if there are at least
-            // two compilation units.
-            assert!(ccx.sess().opts.cg.codegen_units > 1 ||
-                    ccx.sess().opts.debugging_opts.incremental.is_some());
-            // `llval` is a copy of something defined elsewhere, so use
-            // `AvailableExternallyLinkage` to avoid duplicating code in the
-            // output.
-            llvm::SetLinkage(llval, llvm::AvailableExternallyLinkage);
-            return;
-        },
-        OriginalTranslation => {},
-    }
-
-    if let Some(id) = id {
-        let item = ccx.tcx().map.get(id);
-        if let hir_map::NodeItem(i) = item {
-            if let Some(name) = attr::first_attr_value_str_by_name(&i.attrs, "linkage") {
-                if let Some(linkage) = llvm_linkage_by_name(&name) {
-                    llvm::SetLinkage(llval, linkage);
-                } else {
-                    ccx.sess().span_fatal(i.span, "invalid linkage specified");
-                }
-                return;
-            }
-        }
-    }
-
-    let (is_reachable, is_generic) = if let Some(id) = id {
-        (ccx.reachable().contains(&id), false)
-    } else {
-        (false, true)
-    };
-
-    // We need external linkage for items reachable from other translation units, this include
-    // other codegen units in case of parallel compilations.
-    if is_reachable || ccx.sess().opts.cg.codegen_units > 1 {
-        if is_generic {
-            // This only happens with multiple codegen units, in which case we need to use weak_odr
-            // linkage because other crates might expose the same symbol. We cannot use
-            // linkonce_odr here because the symbol might then get dropped before the other codegen
-            // units get to link it.
-            llvm::SetUniqueComdat(ccx.llmod(), llval);
-            llvm::SetLinkage(llval, llvm::WeakODRLinkage);
-        } else {
-            llvm::SetLinkage(llval, llvm::ExternalLinkage);
-        }
-    } else {
-        llvm::SetLinkage(llval, llvm::InternalLinkage);
-    }
-}
-
-fn set_global_section(ccx: &CrateContext, llval: ValueRef, i: &hir::Item) {
-    if let Some(sect) = attr::first_attr_value_str_by_name(&i.attrs, "link_section") {
+pub fn set_link_section(ccx: &CrateContext,
+                        llval: ValueRef,
+                        attrs: &[ast::Attribute]) {
+    if let Some(sect) = attr::first_attr_value_str_by_name(attrs, "link_section") {
         if contains_null(&sect) {
             ccx.sess().fatal(&format!("Illegal null byte in link_section value: `{}`", &sect));
         }
@@ -2260,109 +2194,60 @@ fn set_global_section(ccx: &CrateContext, llval: ValueRef, i: &hir::Item) {
     }
 }
 
-pub fn trans_item(ccx: &CrateContext, item: &hir::Item) {
+fn trans_item(ccx: &CrateContext, item: &hir::Item) {
     let _icx = push_ctxt("trans_item");
 
-    let tcx = ccx.tcx();
-    let from_external = ccx.external_srcs().borrow().contains_key(&item.id);
-
     match item.node {
-        hir::ItemFn(ref decl, _, _, _, ref generics, ref body) => {
-            if !generics.is_type_parameterized() {
-                let trans_everywhere = attr::requests_inline(&item.attrs);
-                // Ignore `trans_everywhere` for cross-crate inlined items
-                // (`from_external`).  `trans_item` will be called once for each
-                // compilation unit that references the item, so it will still get
-                // translated everywhere it's needed.
-                for (ref ccx, is_origin) in ccx.maybe_iter(!from_external && trans_everywhere) {
-                    let def_id = tcx.map.local_def_id(item.id);
-                    let empty_substs = ccx.empty_substs_for_def_id(def_id);
-                    let llfn = Callee::def(ccx, def_id, empty_substs).reify(ccx).val;
-                    trans_fn(ccx, &decl, &body, llfn, empty_substs, item.id);
-                    set_global_section(ccx, llfn, item);
-                    update_linkage(ccx,
-                                   llfn,
-                                   Some(item.id),
-                                   if is_origin {
-                                       OriginalTranslation
-                                   } else {
-                                       InlinedCopy
-                                   });
-
-                    if is_entry_fn(ccx.sess(), item.id) {
-                        create_entry_wrapper(ccx, item.span, llfn);
-                        // check for the #[rustc_error] annotation, which forces an
-                        // error in trans. This is used to write compile-fail tests
-                        // that actually test that compilation succeeds without
-                        // reporting an error.
-                        if tcx.has_attr(def_id, "rustc_error") {
-                            tcx.sess.span_fatal(item.span, "compilation successful");
-                        }
-                    }
-                }
-            }
-        }
-        hir::ItemImpl(_, _, ref generics, _, _, ref impl_items) => {
-            // Both here and below with generic methods, be sure to recurse and look for
-            // items that we need to translate.
-            if !generics.ty_params.is_empty() {
-                return;
-            }
-
-            for impl_item in impl_items {
-                if let hir::ImplItemKind::Method(ref sig, ref body) = impl_item.node {
-                    if sig.generics.ty_params.is_empty() {
-                        let trans_everywhere = attr::requests_inline(&impl_item.attrs);
-                        for (ref ccx, is_origin) in ccx.maybe_iter(trans_everywhere) {
-                            let def_id = tcx.map.local_def_id(impl_item.id);
-                            let empty_substs = ccx.empty_substs_for_def_id(def_id);
-                            let llfn = Callee::def(ccx, def_id, empty_substs).reify(ccx).val;
-                            trans_fn(ccx, &sig.decl, body, llfn, empty_substs, impl_item.id);
-                            update_linkage(ccx, llfn, Some(impl_item.id),
-                                if is_origin {
-                                    OriginalTranslation
-                                } else {
-                                    InlinedCopy
-                                });
-                        }
-                    }
-                }
-            }
-        }
         hir::ItemEnum(ref enum_definition, ref gens) => {
             if gens.ty_params.is_empty() {
                 // sizes only make sense for non-generic types
                 enum_variant_size_lint(ccx, enum_definition, item.span, item.id);
             }
         }
-        hir::ItemStatic(_, m, ref expr) => {
-            let g = match consts::trans_static(ccx, m, expr, item.id, &item.attrs) {
-                Ok(g) => g,
-                Err(err) => ccx.tcx().sess.span_fatal(expr.span, &err.description()),
-            };
-            set_global_section(ccx, g, item);
-            update_linkage(ccx, g, Some(item.id), OriginalTranslation);
+        hir::ItemFn(..) |
+        hir::ItemImpl(..) |
+        hir::ItemStatic(..) => {
+            // Don't do anything here. Translation has been moved to
+            // being "collector-driven".
         }
         _ => {}
     }
 }
 
-pub fn is_entry_fn(sess: &Session, node_id: ast::NodeId) -> bool {
-    match *sess.entry_fn.borrow() {
-        Some((entry_id, _)) => node_id == entry_id,
-        None => false,
+/// Create the `main` function which will initialise the Rust runtime and call
+/// users' main function.
+pub fn maybe_create_entry_wrapper(ccx: &CrateContext) {
+    let (main_def_id, span) = match *ccx.sess().entry_fn.borrow() {
+        Some((id, span)) => {
+            (ccx.tcx().map.local_def_id(id), span)
+        }
+        None => return,
+    };
+
+    // check for the #[rustc_error] annotation, which forces an
+    // error in trans. This is used to write compile-fail tests
+    // that actually test that compilation succeeds without
+    // reporting an error.
+    if ccx.tcx().has_attr(main_def_id, "rustc_error") {
+        ccx.tcx().sess.span_fatal(span, "compilation successful");
+    }
+
+    let instance = Instance::mono(ccx.shared(), main_def_id);
+
+    if !ccx.codegen_unit().items.contains_key(&TransItem::Fn(instance)) {
+        // We want to create the wrapper in the same codegen unit as Rust's main
+        // function.
+        return;
     }
-}
 
-/// Create the `main` function which will initialise the rust runtime and call users’ main
-/// function.
-pub fn create_entry_wrapper(ccx: &CrateContext, sp: Span, main_llfn: ValueRef) {
+    let main_llfn = Callee::def(ccx, main_def_id, instance.substs).reify(ccx).val;
+
     let et = ccx.sess().entry_type.get().unwrap();
     match et {
         config::EntryMain => {
-            create_entry_fn(ccx, sp, main_llfn, true);
+            create_entry_fn(ccx, span, main_llfn, true);
         }
-        config::EntryStart => create_entry_fn(ccx, sp, main_llfn, false),
+        config::EntryStart => create_entry_fn(ccx, span, main_llfn, false),
         config::EntryNone => {}    // Do nothing.
     }
 
@@ -2483,16 +2368,16 @@ fn internalize_symbols(cx: &CrateContextList, reachable: &HashSet<&str>) {
                 let linkage = llvm::LLVMGetLinkage(val);
                 // We only care about external declarations (not definitions)
                 // and available_externally definitions.
-                if !(linkage == llvm::ExternalLinkage as c_uint &&
-                     llvm::LLVMIsDeclaration(val) != 0) &&
-                   !(linkage == llvm::AvailableExternallyLinkage as c_uint) {
-                    continue;
+                let is_available_externally = linkage == llvm::AvailableExternallyLinkage as c_uint;
+                let is_decl = llvm::LLVMIsDeclaration(val) != 0;
+
+                if is_decl || is_available_externally {
+                    let name = CStr::from_ptr(llvm::LLVMGetValueName(val))
+                        .to_bytes()
+                        .to_vec();
+                    declared.insert(name);
                 }
 
-                let name = CStr::from_ptr(llvm::LLVMGetValueName(val))
-                               .to_bytes()
-                               .to_vec();
-                declared.insert(name);
             }
         }
 
@@ -2502,21 +2387,27 @@ fn internalize_symbols(cx: &CrateContextList, reachable: &HashSet<&str>) {
         for ccx in cx.iter() {
             for val in iter_globals(ccx.llmod()).chain(iter_functions(ccx.llmod())) {
                 let linkage = llvm::LLVMGetLinkage(val);
+
+                let is_external = linkage == llvm::ExternalLinkage as c_uint;
+                let is_weak_odr = linkage == llvm::WeakODRLinkage as c_uint;
+                let is_decl = llvm::LLVMIsDeclaration(val) != 0;
+
                 // We only care about external definitions.
-                if !((linkage == llvm::ExternalLinkage as c_uint ||
-                      linkage == llvm::WeakODRLinkage as c_uint) &&
-                     llvm::LLVMIsDeclaration(val) == 0) {
-                    continue;
-                }
+                if (is_external || is_weak_odr) && !is_decl {
+
+                    let name = CStr::from_ptr(llvm::LLVMGetValueName(val))
+                                .to_bytes()
+                                .to_vec();
+
+                    let is_declared = declared.contains(&name);
+                    let reachable = reachable.contains(str::from_utf8(&name).unwrap());
+
+                    if !is_declared && !reachable {
+                        llvm::SetLinkage(val, llvm::InternalLinkage);
+                        llvm::SetDLLStorageClass(val, llvm::DefaultStorageClass);
+                        llvm::UnsetComdat(val);
+                    }
 
-                let name = CStr::from_ptr(llvm::LLVMGetValueName(val))
-                               .to_bytes()
-                               .to_vec();
-                if !declared.contains(&name) &&
-                   !reachable.contains(str::from_utf8(&name).unwrap()) {
-                    llvm::SetLinkage(val, llvm::InternalLinkage);
-                    llvm::SetDLLStorageClass(val, llvm::DefaultStorageClass);
-                    llvm::UnsetComdat(val);
                 }
             }
         }
@@ -2610,8 +2501,8 @@ fn iter_functions(llmod: llvm::ModuleRef) -> ValueIter {
 ///
 /// This list is later used by linkers to determine the set of symbols needed to
 /// be exposed from a dynamic library and it's also encoded into the metadata.
-pub fn filter_reachable_ids(scx: &SharedCrateContext) -> NodeSet {
-    scx.reachable().iter().map(|x| *x).filter(|&id| {
+pub fn filter_reachable_ids(tcx: TyCtxt, reachable: NodeSet) -> NodeSet {
+    reachable.into_iter().filter(|&id| {
         // Next, we want to ignore some FFI functions that are not exposed from
         // this crate. Reachable FFI functions can be lumped into two
         // categories:
@@ -2625,9 +2516,9 @@ pub fn filter_reachable_ids(scx: &SharedCrateContext) -> NodeSet {
         //
         // As a result, if this id is an FFI item (foreign item) then we only
         // let it through if it's included statically.
-        match scx.tcx().map.get(id) {
+        match tcx.map.get(id) {
             hir_map::NodeForeignItem(..) => {
-                scx.sess().cstore.is_statically_included_foreign_item(id)
+                tcx.sess.cstore.is_statically_included_foreign_item(id)
             }
 
             // Only consider nodes that actually have exported symbols.
@@ -2637,8 +2528,8 @@ pub fn filter_reachable_ids(scx: &SharedCrateContext) -> NodeSet {
                 node: hir::ItemFn(..), .. }) |
             hir_map::NodeImplItem(&hir::ImplItem {
                 node: hir::ImplItemKind::Method(..), .. }) => {
-                let def_id = scx.tcx().map.local_def_id(id);
-                let scheme = scx.tcx().lookup_item_type(def_id);
+                let def_id = tcx.map.local_def_id(id);
+                let scheme = tcx.lookup_item_type(def_id);
                 scheme.generics.types.is_empty()
             }
 
@@ -2660,6 +2551,7 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
     let krate = tcx.map.krate();
 
     let ty::CrateAnalysis { export_map, reachable, name, .. } = analysis;
+    let reachable = filter_reachable_ids(tcx, reachable);
 
     let check_overflow = if let Some(v) = tcx.sess.opts.debugging_opts.force_overflow_checks {
         v
@@ -2683,29 +2575,34 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                                              reachable,
                                              check_overflow,
                                              check_dropflag);
-
-    let reachable_symbol_ids = filter_reachable_ids(&shared_ccx);
-
     // Translate the metadata.
     let metadata = time(tcx.sess.time_passes(), "write metadata", || {
-        write_metadata(&shared_ccx, &reachable_symbol_ids)
+        write_metadata(&shared_ccx, shared_ccx.reachable())
     });
 
     let metadata_module = ModuleTranslation {
+        name: "metadata".to_string(),
         llcx: shared_ccx.metadata_llcx(),
         llmod: shared_ccx.metadata_llmod(),
     };
     let no_builtins = attr::contains_name(&krate.attrs, "no_builtins");
 
-    let codegen_units = collect_and_partition_translation_items(&shared_ccx);
+    // Run the translation item collector and partition the collected items into
+    // codegen units.
+    let (codegen_units, symbol_map) = collect_and_partition_translation_items(&shared_ccx);
     let codegen_unit_count = codegen_units.len();
-    assert!(tcx.sess.opts.cg.codegen_units == codegen_unit_count ||
-            tcx.sess.opts.debugging_opts.incremental.is_some());
 
-    let crate_context_list = CrateContextList::new(&shared_ccx, codegen_units);
+    let symbol_map = Rc::new(symbol_map);
 
+    let crate_context_list = CrateContextList::new(&shared_ccx,
+                                                   codegen_units,
+                                                   symbol_map.clone());
     let modules = crate_context_list.iter()
-        .map(|ccx| ModuleTranslation { llcx: ccx.llcx(), llmod: ccx.llmod() })
+        .map(|ccx| ModuleTranslation {
+            name: String::from(&ccx.codegen_unit().name[..]),
+            llcx: ccx.llcx(),
+            llmod: ccx.llmod()
+        })
         .collect();
 
     // Skip crate items and just output metadata in -Z no-trans mode.
@@ -2722,26 +2619,30 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
         };
     }
 
-    {
-        let ccx = crate_context_list.get_ccx(0);
+    // Instantiate translation items without filling out definitions yet...
+    for ccx in crate_context_list.iter() {
+        let trans_items = ccx.codegen_unit()
+                             .items_in_deterministic_order(tcx, &symbol_map);
 
-        // Translate all items. See `TransModVisitor` for
-        // details on why we walk in this particular way.
-        {
-            let _icx = push_ctxt("text");
-            intravisit::walk_mod(&mut TransItemsWithinModVisitor { ccx: &ccx }, &krate.module);
-            krate.visit_all_items(&mut TransModVisitor { ccx: &ccx });
+        for (trans_item, linkage) in trans_items {
+            trans_item.predefine(&ccx, linkage);
         }
-
-        collector::print_collection_results(ccx.shared());
-
-        symbol_names_test::report_symbol_names(&ccx);
     }
 
+    // ... and now that we have everything pre-defined, fill out those definitions.
     for ccx in crate_context_list.iter() {
-        if ccx.sess().opts.debuginfo != NoDebugInfo {
-            debuginfo::finalize(&ccx);
+        let trans_items = ccx.codegen_unit()
+                             .items_in_deterministic_order(tcx, &symbol_map);
+
+        for (trans_item, _) in trans_items {
+           trans_item.define(&ccx);
         }
+
+        // If this codegen unit contains the main function, also create the
+        // wrapper here.
+        maybe_create_entry_wrapper(&ccx);
+
+        // Run replace-all-uses-with for statics that need it
         for &(old_g, new_g) in ccx.statics_to_rauw().borrow().iter() {
             unsafe {
                 let bitcast = llvm::LLVMConstPointerCast(new_g, llvm::LLVMTypeOf(old_g));
@@ -2749,6 +2650,26 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                 llvm::LLVMDeleteGlobal(old_g);
             }
         }
+
+        // Finalize debuginfo
+        if ccx.sess().opts.debuginfo != NoDebugInfo {
+            debuginfo::finalize(&ccx);
+        }
+    }
+
+    symbol_names_test::report_symbol_names(&shared_ccx);
+
+    {
+        let ccx = crate_context_list.get_ccx(0);
+
+        // FIXME: #34018
+        // At this point, we only walk the HIR for running
+        // enum_variant_size_lint(). This should arguably be moved somewhere
+        // else.
+        {
+            intravisit::walk_mod(&mut TransItemsWithinModVisitor { ccx: &ccx }, &krate.module);
+            krate.visit_all_items(&mut TransModVisitor { ccx: &ccx });
+        }
     }
 
     if shared_ccx.sess().trans_stats() {
@@ -2758,6 +2679,8 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
         println!("n_null_glues: {}", stats.n_null_glues.get());
         println!("n_real_glues: {}", stats.n_real_glues.get());
 
+        println!("n_fallback_instantiations: {}", stats.n_fallback_instantiations.get());
+
         println!("n_fns: {}", stats.n_fns.get());
         println!("n_monos: {}", stats.n_monos.get());
         println!("n_inlines: {}", stats.n_inlines.get());
@@ -2774,6 +2697,7 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
             }
         }
     }
+
     if shared_ccx.sess().count_llvm_insns() {
         for (k, v) in shared_ccx.stats().llvm_insns.borrow().iter() {
             println!("{:7} {}", *v, *k);
@@ -2781,10 +2705,11 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
     }
 
     let sess = shared_ccx.sess();
-    let mut reachable_symbols = reachable_symbol_ids.iter().map(|&id| {
+    let mut reachable_symbols = shared_ccx.reachable().iter().map(|&id| {
         let def_id = shared_ccx.tcx().map.local_def_id(id);
-        Instance::mono(&shared_ccx, def_id).symbol_name(&shared_ccx)
+        symbol_for_def_id(def_id, &shared_ccx, &symbol_map)
     }).collect::<Vec<_>>();
+
     if sess.entry_fn.borrow().is_some() {
         reachable_symbols.push("main".to_string());
     }
@@ -2806,7 +2731,7 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
         reachable_symbols.extend(syms.into_iter().filter(|did| {
             sess.cstore.is_extern_item(shared_ccx.tcx(), *did)
         }).map(|did| {
-            Instance::mono(&shared_ccx, did).symbol_name(&shared_ccx)
+            symbol_for_def_id(did, &shared_ccx, &symbol_map)
         }));
     }
 
@@ -2821,6 +2746,7 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
     }
 
     let linker_info = LinkerInfo::new(&shared_ccx, &reachable_symbols);
+
     CrateTranslation {
         modules: modules,
         metadata_module: metadata_module,
@@ -2899,7 +2825,7 @@ fn visit_item(&mut self, i: &hir::Item) {
 }
 
 fn collect_and_partition_translation_items<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>)
-                                                     -> Vec<CodegenUnit<'tcx>> {
+                                                     -> (Vec<CodegenUnit<'tcx>>, SymbolMap<'tcx>) {
     let time_passes = scx.sess().time_passes();
 
     let collection_mode = match scx.sess().opts.debugging_opts.print_trans_items {
@@ -2922,10 +2848,13 @@ fn collect_and_partition_translation_items<'a, 'tcx>(scx: &SharedCrateContext<'a
         None => TransItemCollectionMode::Lazy
     };
 
-    let (items, inlining_map) = time(time_passes, "translation item collection", || {
-        collector::collect_crate_translation_items(&scx, collection_mode)
+    let (items, inlining_map) =
+        time(time_passes, "translation item collection", || {
+            collector::collect_crate_translation_items(&scx, collection_mode)
     });
 
+    let symbol_map = SymbolMap::build(scx, items.iter().cloned());
+
     let strategy = if scx.sess().opts.debugging_opts.incremental.is_some() {
         PartitioningStrategy::PerModule
     } else {
@@ -2936,9 +2865,21 @@ fn collect_and_partition_translation_items<'a, 'tcx>(scx: &SharedCrateContext<'a
         partitioning::partition(scx.tcx(),
                                 items.iter().cloned(),
                                 strategy,
-                                &inlining_map)
+                                &inlining_map,
+                                scx.reachable())
     });
 
+    assert!(scx.tcx().sess.opts.cg.codegen_units == codegen_units.len() ||
+            scx.tcx().sess.opts.debugging_opts.incremental.is_some());
+
+    {
+        let mut ccx_map = scx.translation_items().borrow_mut();
+
+        for trans_item in items.iter().cloned() {
+            ccx_map.insert(trans_item);
+        }
+    }
+
     if scx.sess().opts.debugging_opts.print_trans_items.is_some() {
         let mut item_to_cgus = HashMap::new();
 
@@ -2990,13 +2931,26 @@ fn collect_and_partition_translation_items<'a, 'tcx>(scx: &SharedCrateContext<'a
         for item in item_keys {
             println!("TRANS_ITEM {}", item);
         }
+    }
 
-        let mut ccx_map = scx.translation_items().borrow_mut();
+    (codegen_units, symbol_map)
+}
 
-        for cgi in items {
-            ccx_map.insert(cgi, TransItemState::PredictedButNotGenerated);
+fn symbol_for_def_id<'a, 'tcx>(def_id: DefId,
+                               scx: &SharedCrateContext<'a, 'tcx>,
+                               symbol_map: &SymbolMap<'tcx>)
+                               -> String {
+    // Just try to look things up in the symbol map. If nothing's there, we
+    // recompute.
+    if let Some(node_id) = scx.tcx().map.as_local_node_id(def_id) {
+        if let Some(sym) = symbol_map.get(TransItem::Static(node_id)) {
+            return sym.to_owned();
         }
     }
 
-    codegen_units
+    let instance = Instance::mono(scx, def_id);
+
+    symbol_map.get(TransItem::Fn(instance))
+              .map(str::to_owned)
+              .unwrap_or_else(|| instance.symbol_name(scx))
 }
index 9ea65532b35b61c8c3a1cd35deda4ca89861ad34..983ee564c35b1051fcc447b514d4719d05578707 100644 (file)
@@ -46,6 +46,7 @@
 use machine::llalign_of_min;
 use meth;
 use monomorphize::{self, Instance};
+use trans_item::TransItem;
 use type_::Type;
 use type_of;
 use value::Value;
@@ -302,7 +303,7 @@ pub fn trans_fn_pointer_shim<'a, 'tcx>(
     let tcx = ccx.tcx();
 
     // Normalize the type for better caching.
-    let bare_fn_ty = tcx.erase_regions(&bare_fn_ty);
+    let bare_fn_ty = tcx.normalize_associated_type(&bare_fn_ty);
 
     // If this is an impl of `Fn` or `FnMut` trait, the receiver is `&self`.
     let is_by_ref = match closure_kind {
@@ -468,7 +469,7 @@ fn is_named_tuple_constructor(tcx: TyCtxt, def_id: DefId) -> bool {
         // Should be either intra-crate or inlined.
         assert_eq!(def_id.krate, LOCAL_CRATE);
 
-        let substs = tcx.mk_substs(substs.clone().erase_regions());
+        let substs = tcx.normalize_associated_type(&substs);
         let (val, fn_ty) = monomorphize::monomorphic_fn(ccx, def_id, substs);
         let fn_ptr_ty = match fn_ty.sty {
             ty::TyFnDef(_, _, fty) => {
@@ -536,13 +537,15 @@ fn is_named_tuple_constructor(tcx: TyCtxt, def_id: DefId) -> bool {
     // reference. It also occurs when testing libcore and in some
     // other weird situations. Annoying.
 
-    let sym = instance.symbol_name(ccx.shared());
+    let sym = ccx.symbol_map().get_or_compute(ccx.shared(),
+                                              TransItem::Fn(instance));
+
     let llptrty = type_of::type_of(ccx, fn_ptr_ty);
     let llfn = if let Some(llfn) = declare::get_declared_value(ccx, &sym) {
         if let Some(span) = local_item {
             if declare::get_defined_value(ccx, &sym).is_some() {
                 ccx.sess().span_fatal(span,
-                    &format!("symbol `{}` is already defined", sym));
+                    &format!("symbol `{}` is already defined", &sym));
             }
         }
 
index 9196cfce16feb3c85f2b937605b18c169f1689ea..90443d9ec4f7067b0406ec02db37d4ab974e0f39 100644 (file)
@@ -10,7 +10,7 @@
 
 use arena::TypedArena;
 use back::symbol_names;
-use llvm::{ValueRef, get_param, get_params};
+use llvm::{self, ValueRef, get_param, get_params};
 use rustc::hir::def_id::DefId;
 use abi::{Abi, FnType};
 use adt;
@@ -167,16 +167,16 @@ fn get_or_create_closure_declaration<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
             variadic: false
         })
     }));
-    let llfn = declare::define_internal_fn(ccx, &symbol, function_type);
+    let llfn = declare::declare_fn(ccx, &symbol, function_type);
 
-    // set an inline hint for all closures
-    attributes::inline(llfn, attributes::InlineAttr::Hint);
     attributes::set_frame_pointer_elimination(ccx, llfn);
 
     debug!("get_or_create_declaration_if_closure(): inserting new \
             closure {:?}: {:?}",
            instance, Value(llfn));
-    ccx.instances().borrow_mut().insert(instance, llfn);
+
+    // NOTE: We do *not* store llfn in the ccx.instances() map here;
+    //       that is only done when the closure's body is translated.
 
     llfn
 }
@@ -197,8 +197,8 @@ pub fn trans_closure_expr<'a, 'tcx>(dest: Dest<'a, 'tcx>,
     // (*) Note that in the case of inlined functions, the `closure_def_id` will be the
     // defid of the closure in its original crate, whereas `id` will be the id of the local
     // inlined copy.
-
-    let param_substs = closure_substs.func_substs;
+    debug!("trans_closure_expr(id={:?}, closure_def_id={:?}, closure_substs={:?})",
+           id, closure_def_id, closure_substs);
 
     let ccx = match dest {
         Dest::SaveIn(bcx, _) => bcx.ccx(),
@@ -207,39 +207,49 @@ pub fn trans_closure_expr<'a, 'tcx>(dest: Dest<'a, 'tcx>,
     let tcx = ccx.tcx();
     let _icx = push_ctxt("closure::trans_closure_expr");
 
-    debug!("trans_closure_expr(id={:?}, closure_def_id={:?}, closure_substs={:?})",
-           id, closure_def_id, closure_substs);
-
-    let llfn = get_or_create_closure_declaration(ccx, closure_def_id, closure_substs);
-
-    // Get the type of this closure. Use the current `param_substs` as
-    // the closure substitutions. This makes sense because the closure
-    // takes the same set of type arguments as the enclosing fn, and
-    // this function (`trans_closure`) is invoked at the point
-    // of the closure expression.
-
-    let sig = &tcx.closure_type(closure_def_id, closure_substs).sig;
-    let sig = tcx.erase_late_bound_regions(sig);
-    let sig = tcx.normalize_associated_type(&sig);
-
-    let closure_type = tcx.mk_closure_from_closure_substs(closure_def_id,
-                                                          closure_substs);
-    let sig = ty::FnSig {
-        inputs: Some(get_self_type(tcx, closure_def_id, closure_type))
-                    .into_iter().chain(sig.inputs).collect(),
-        output: sig.output,
-        variadic: false
-    };
-
-    trans_closure(ccx,
-                  decl,
-                  body,
-                  llfn,
-                  Instance::new(closure_def_id, param_substs),
-                  id,
-                  &sig,
-                  Abi::RustCall,
-                  ClosureEnv::Closure(closure_def_id, id));
+    let param_substs = closure_substs.func_substs;
+    let instance = Instance::new(closure_def_id, param_substs);
+
+    // If we have not done so yet, translate this closure's body
+    if  !ccx.instances().borrow().contains_key(&instance) {
+        let llfn = get_or_create_closure_declaration(ccx, closure_def_id, closure_substs);
+        llvm::SetLinkage(llfn, llvm::WeakODRLinkage);
+        llvm::SetUniqueComdat(ccx.llmod(), llfn);
+
+        // set an inline hint for all closures
+        attributes::inline(llfn, attributes::InlineAttr::Hint);
+
+        // Get the type of this closure. Use the current `param_substs` as
+        // the closure substitutions. This makes sense because the closure
+        // takes the same set of type arguments as the enclosing fn, and
+        // this function (`trans_closure`) is invoked at the point
+        // of the closure expression.
+
+        let sig = &tcx.closure_type(closure_def_id, closure_substs).sig;
+        let sig = tcx.erase_late_bound_regions(sig);
+        let sig = tcx.normalize_associated_type(&sig);
+
+        let closure_type = tcx.mk_closure_from_closure_substs(closure_def_id,
+                                                              closure_substs);
+        let sig = ty::FnSig {
+            inputs: Some(get_self_type(tcx, closure_def_id, closure_type))
+                        .into_iter().chain(sig.inputs).collect(),
+            output: sig.output,
+            variadic: false
+        };
+
+        trans_closure(ccx,
+                      decl,
+                      body,
+                      llfn,
+                      Instance::new(closure_def_id, param_substs),
+                      id,
+                      &sig,
+                      Abi::RustCall,
+                      ClosureEnv::Closure(closure_def_id, id));
+
+        ccx.instances().borrow_mut().insert(instance, llfn);
+    }
 
     // Don't hoist this to the top of the function. It's perfectly legitimate
     // to have a zero-size closure (in which case dest will be `Ignore`) and
@@ -377,7 +387,7 @@ fn trans_fn_once_adapter_shim<'a, 'tcx>(
     // Create the by-value helper.
     let function_name =
         symbol_names::internal_name_from_type_and_suffix(ccx, llonce_fn_ty, "once_shim");
-    let lloncefn = declare::define_internal_fn(ccx, &function_name, llonce_fn_ty);
+    let lloncefn = declare::declare_fn(ccx, &function_name, llonce_fn_ty);
     attributes::set_frame_pointer_elimination(ccx, lloncefn);
 
     let (block_arena, fcx): (TypedArena<_>, FunctionContext);
index eea6aec37260e9e918ed8417a43e850919871494..ba2cd2ba699926ca40e274f5e015f590b2c04f19 100644 (file)
 use syntax::abi::Abi;
 use errors;
 use syntax_pos::DUMMY_SP;
+use syntax::ast::NodeId;
 use base::custom_coerce_unsize_info;
 use context::SharedCrateContext;
 use common::{fulfill_obligation, normalize_and_test_predicates, type_is_sized};
@@ -349,17 +350,14 @@ fn collect_items_rec<'a, 'tcx: 'a>(scx: &SharedCrateContext<'a, 'tcx>,
                 || format!("Could not find MIR for static: {:?}", def_id));
 
             let empty_substs = scx.empty_substs_for_def_id(def_id);
-            let mut visitor = MirNeighborCollector {
+            let visitor = MirNeighborCollector {
                 scx: scx,
                 mir: &mir,
                 output: &mut neighbors,
                 param_substs: empty_substs
             };
 
-            visitor.visit_mir(&mir);
-            for promoted in &mir.promoted {
-                visitor.visit_mir(promoted);
-            }
+            visit_mir_and_promoted(visitor, &mir);
         }
         TransItem::Fn(instance) => {
             // Keep track of the monomorphization recursion depth
@@ -372,17 +370,14 @@ fn collect_items_rec<'a, 'tcx: 'a>(scx: &SharedCrateContext<'a, 'tcx>,
             let mir = errors::expect(scx.sess().diagnostic(), scx.get_mir(instance.def),
                 || format!("Could not find MIR for function: {}", instance));
 
-            let mut visitor = MirNeighborCollector {
+            let visitor = MirNeighborCollector {
                 scx: scx,
                 mir: &mir,
                 output: &mut neighbors,
                 param_substs: instance.substs
             };
 
-            visitor.visit_mir(&mir);
-            for promoted in &mir.promoted {
-                visitor.visit_mir(promoted);
-            }
+            visit_mir_and_promoted(visitor, &mir);
         }
     }
 
@@ -456,12 +451,25 @@ fn visit_rvalue(&mut self, rvalue: &mir::Rvalue<'tcx>) {
         match *rvalue {
             mir::Rvalue::Aggregate(mir::AggregateKind::Closure(def_id,
                                                                ref substs), _) => {
-                assert!(can_have_local_instance(self.scx.tcx(), def_id));
-                let trans_item = create_fn_trans_item(self.scx.tcx(),
-                                                      def_id,
-                                                      substs.func_substs,
-                                                      self.param_substs);
-                self.output.push(trans_item);
+                let mir = errors::expect(self.scx.sess().diagnostic(),
+                                         self.scx.get_mir(def_id),
+                                         || {
+                    format!("Could not find MIR for closure: {:?}", def_id)
+                });
+
+                let concrete_substs = monomorphize::apply_param_substs(self.scx.tcx(),
+                                                                       self.param_substs,
+                                                                       &substs.func_substs);
+                let concrete_substs = self.scx.tcx().erase_regions(&concrete_substs);
+
+                let visitor = MirNeighborCollector {
+                    scx: self.scx,
+                    mir: &mir,
+                    output: self.output,
+                    param_substs: concrete_substs
+                };
+
+                visit_mir_and_promoted(visitor, &mir);
             }
             // When doing an cast from a regular pointer to a fat pointer, we
             // have to instantiate all methods of the trait being cast to, so we
@@ -624,7 +632,8 @@ fn visit_terminator_kind(&mut self,
                             let operand_ty = monomorphize::apply_param_substs(tcx,
                                                                               self.param_substs,
                                                                               &mt.ty);
-                            self.output.push(TransItem::DropGlue(DropGlueKind::Ty(operand_ty)));
+                            let ty = glue::get_drop_glue_type(tcx, operand_ty);
+                            self.output.push(TransItem::DropGlue(DropGlueKind::Ty(ty)));
                         } else {
                             bug!("Has the drop_in_place() intrinsic's signature changed?")
                         }
@@ -1070,7 +1079,6 @@ fn visit_item(&mut self, item: &'v hir::Item) {
             hir::ItemTy(..)          |
             hir::ItemDefaultImpl(..) |
             hir::ItemTrait(..)       |
-            hir::ItemConst(..)       |
             hir::ItemMod(..)         => {
                 // Nothing to do, just keep recursing...
             }
@@ -1107,9 +1115,14 @@ fn visit_item(&mut self, item: &'v hir::Item) {
                                         self.scx.tcx().map.local_def_id(item.id)));
                 self.output.push(TransItem::Static(item.id));
             }
-            hir::ItemFn(_, _, constness, _, ref generics, _) => {
-                if !generics.is_type_parameterized() &&
-                   constness == hir::Constness::NotConst {
+            hir::ItemConst(..) => {
+                debug!("RootCollector: ItemConst({})",
+                       def_id_to_string(self.scx.tcx(),
+                                        self.scx.tcx().map.local_def_id(item.id)));
+                add_roots_for_const_item(self.scx, item.id, self.output);
+            }
+            hir::ItemFn(_, _, _, _, ref generics, _) => {
+                if !generics.is_type_parameterized() {
                     let def_id = self.scx.tcx().map.local_def_id(item.id);
 
                     debug!("RootCollector: ItemFn({})",
@@ -1129,9 +1142,8 @@ fn visit_impl_item(&mut self, ii: &'v hir::ImplItem) {
         match ii.node {
             hir::ImplItemKind::Method(hir::MethodSig {
                 ref generics,
-                constness,
                 ..
-            }, _) if constness == hir::Constness::NotConst => {
+            }, _) => {
                 let hir_map = &self.scx.tcx().map;
                 let parent_node_id = hir_map.get_parent_node(ii.id);
                 let is_impl_generic = match hir_map.expect_item(parent_node_id) {
@@ -1228,111 +1240,34 @@ fn create_trans_items_for_default_impls<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
     }
 }
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub enum TransItemState {
-    PredictedAndGenerated,
-    PredictedButNotGenerated,
-    NotPredictedButGenerated,
-}
+// There are no translation items for constants themselves but their
+// initializers might still contain something that produces translation items,
+// such as casts that introduce a new vtable.
+fn add_roots_for_const_item<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
+                                      const_item_node_id: NodeId,
+                                      output: &mut Vec<TransItem<'tcx>>)
+{
+    let def_id = scx.tcx().map.local_def_id(const_item_node_id);
+
+    // Scan the MIR in order to find function calls, closures, and
+    // drop-glue
+    let mir = errors::expect(scx.sess().diagnostic(), scx.get_mir(def_id),
+        || format!("Could not find MIR for const: {:?}", def_id));
+
+    let empty_substs = scx.empty_substs_for_def_id(def_id);
+    let visitor = MirNeighborCollector {
+        scx: scx,
+        mir: &mir,
+        output: output,
+        param_substs: empty_substs
+    };
 
-pub fn collecting_debug_information(scx: &SharedCrateContext) -> bool {
-    return cfg!(debug_assertions) &&
-           scx.sess().opts.debugging_opts.print_trans_items.is_some();
+    visit_mir_and_promoted(visitor, &mir);
 }
 
-pub fn print_collection_results<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>) {
-    use std::hash::{Hash, SipHasher, Hasher};
-
-    if !collecting_debug_information(scx) {
-        return;
-    }
-
-    fn hash<T: Hash>(t: &T) -> u64 {
-        let mut s = SipHasher::new();
-        t.hash(&mut s);
-        s.finish()
-    }
-
-    let trans_items = scx.translation_items().borrow();
-
-    {
-        // Check for duplicate item keys
-        let mut item_keys = FnvHashMap();
-
-        for (item, item_state) in trans_items.iter() {
-            let k = item.to_string(scx.tcx());
-
-            if item_keys.contains_key(&k) {
-                let prev: (TransItem, TransItemState) = item_keys[&k];
-                debug!("DUPLICATE KEY: {}", k);
-                debug!(" (1) {:?}, {:?}, hash: {}, raw: {}",
-                       prev.0,
-                       prev.1,
-                       hash(&prev.0),
-                       prev.0.to_raw_string());
-
-                debug!(" (2) {:?}, {:?}, hash: {}, raw: {}",
-                       *item,
-                       *item_state,
-                       hash(item),
-                       item.to_raw_string());
-            } else {
-                item_keys.insert(k, (*item, *item_state));
-            }
-        }
-    }
-
-    let mut predicted_but_not_generated = FnvHashSet();
-    let mut not_predicted_but_generated = FnvHashSet();
-    let mut predicted = FnvHashSet();
-    let mut generated = FnvHashSet();
-
-    for (item, item_state) in trans_items.iter() {
-        let item_key = item.to_string(scx.tcx());
-
-        match *item_state {
-            TransItemState::PredictedAndGenerated => {
-                predicted.insert(item_key.clone());
-                generated.insert(item_key);
-            }
-            TransItemState::PredictedButNotGenerated => {
-                predicted_but_not_generated.insert(item_key.clone());
-                predicted.insert(item_key);
-            }
-            TransItemState::NotPredictedButGenerated => {
-                not_predicted_but_generated.insert(item_key.clone());
-                generated.insert(item_key);
-            }
-        }
-    }
-
-    debug!("Total number of translation items predicted: {}", predicted.len());
-    debug!("Total number of translation items generated: {}", generated.len());
-    debug!("Total number of translation items predicted but not generated: {}",
-           predicted_but_not_generated.len());
-    debug!("Total number of translation items not predicted but generated: {}",
-           not_predicted_but_generated.len());
-
-    if generated.len() > 0 {
-        debug!("Failed to predict {}% of translation items",
-               (100 * not_predicted_but_generated.len()) / generated.len());
-    }
-    if generated.len() > 0 {
-        debug!("Predict {}% too many translation items",
-               (100 * predicted_but_not_generated.len()) / generated.len());
-    }
-
-    debug!("");
-    debug!("Not predicted but generated:");
-    debug!("============================");
-    for item in not_predicted_but_generated {
-        debug!(" - {}", item);
-    }
-
-    debug!("");
-    debug!("Predicted but not generated:");
-    debug!("============================");
-    for item in predicted_but_not_generated {
-        debug!(" - {}", item);
+fn visit_mir_and_promoted<'tcx, V: MirVisitor<'tcx>>(mut visitor: V, mir: &mir::Mir<'tcx>) {
+    visitor.visit_mir(&mir);
+    for promoted in &mir.promoted {
+        visitor.visit_mir(promoted);
     }
 }
index 5596ab0d819e0a3470f7b6a2e360a0c72de87fa2..00feb2cd1de0961f5ec1e97965ea1480d3ee3543 100644 (file)
@@ -21,7 +21,6 @@
 use {abi, adt, closure, debuginfo, expr, machine};
 use base::{self, push_ctxt};
 use callee::Callee;
-use collector;
 use trans_item::TransItem;
 use common::{type_is_sized, C_nil, const_get_elt};
 use common::{CrateContext, C_integral, C_floating, C_bool, C_str_slice, C_bytes, val_ty};
@@ -901,7 +900,7 @@ fn const_expr_unadjusted<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
                 }
                 Def::Variant(enum_did, variant_did) => {
                     let vinfo = cx.tcx().lookup_adt_def(enum_did).variant_with_id(variant_did);
-                    match vinfo.kind() {
+                    match vinfo.kind {
                         ty::VariantKind::Unit => {
                             let repr = adt::represent_type(cx, ety);
                             adt::trans_const(cx, &repr, Disr::from(vinfo.disr_val), &[])
@@ -1013,31 +1012,41 @@ pub fn get_static<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, def_id: DefId)
         return Datum::new(g, ty, Lvalue::new("static"));
     }
 
-    let sym = instance.symbol_name(ccx.shared());
-
     let g = if let Some(id) = ccx.tcx().map.as_local_node_id(def_id) {
+
         let llty = type_of::type_of(ccx, ty);
-        match ccx.tcx().map.get(id) {
+        let (g, attrs) = match ccx.tcx().map.get(id) {
             hir_map::NodeItem(&hir::Item {
-                span, node: hir::ItemStatic(..), ..
+                ref attrs, span, node: hir::ItemStatic(..), ..
             }) => {
-                // If this static came from an external crate, then
-                // we need to get the symbol from metadata instead of
-                // using the current crate's name/version
-                // information in the hash of the symbol
-                debug!("making {}", sym);
-
-                // Create the global before evaluating the initializer;
-                // this is necessary to allow recursive statics.
-                declare::define_global(ccx, &sym, llty).unwrap_or_else(|| {
-                    ccx.sess().span_fatal(span,
-                        &format!("symbol `{}` is already defined", sym))
-                })
+                let sym = ccx.symbol_map()
+                             .get(TransItem::Static(id))
+                             .expect("Local statics should always be in the SymbolMap");
+                // Make sure that this is never executed for something inlined.
+                assert!(!ccx.external_srcs().borrow().contains_key(&id));
+
+                let defined_in_current_codegen_unit = ccx.codegen_unit()
+                                                         .items
+                                                         .contains_key(&TransItem::Static(id));
+                if defined_in_current_codegen_unit {
+                    if declare::get_declared_value(ccx, sym).is_none() {
+                        span_bug!(span, "trans: Static not properly pre-defined?");
+                    }
+                } else {
+                    if declare::get_declared_value(ccx, sym).is_some() {
+                        span_bug!(span, "trans: Conflicting symbol names for static?");
+                    }
+                }
+
+                let g = declare::define_global(ccx, sym, llty).unwrap();
+
+                (g, attrs)
             }
 
             hir_map::NodeForeignItem(&hir::ForeignItem {
                 ref attrs, span, node: hir::ForeignItemStatic(..), ..
             }) => {
+                let sym = instance.symbol_name(ccx.shared());
                 let g = if let Some(name) =
                         attr::first_attr_value_str_by_name(&attrs, "linkage") {
                     // If this is a static with a linkage specified, then we need to handle
@@ -1072,7 +1081,7 @@ pub fn get_static<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, def_id: DefId)
                         real_name.push_str(&sym);
                         let g2 = declare::define_global(ccx, &real_name, llty).unwrap_or_else(||{
                             ccx.sess().span_fatal(span,
-                                &format!("symbol `{}` is already defined", sym))
+                                &format!("symbol `{}` is already defined", &sym))
                         });
                         llvm::SetLinkage(g2, llvm::InternalLinkage);
                         llvm::LLVMSetInitializer(g2, g1);
@@ -1083,18 +1092,22 @@ pub fn get_static<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, def_id: DefId)
                     declare::declare_global(ccx, &sym, llty)
                 };
 
-                for attr in attrs {
-                    if attr.check_name("thread_local") {
-                        llvm::set_thread_local(g, true);
-                    }
-                }
-
-                g
+                (g, attrs)
             }
 
             item => bug!("get_static: expected static, found {:?}", item)
+        };
+
+        for attr in attrs {
+            if attr.check_name("thread_local") {
+                llvm::set_thread_local(g, true);
+            }
         }
+
+        g
     } else {
+        let sym = instance.symbol_name(ccx.shared());
+
         // FIXME(nagisa): perhaps the map of externs could be offloaded to llvm somehow?
         // FIXME(nagisa): investigate whether it can be changed into define_global
         let g = declare::declare_global(ccx, &sym, type_of::type_of(ccx, ty));
@@ -1126,11 +1139,6 @@ pub fn trans_static(ccx: &CrateContext,
                     id: ast::NodeId,
                     attrs: &[ast::Attribute])
                     -> Result<ValueRef, ConstEvalErr> {
-
-    if collector::collecting_debug_information(ccx.shared()) {
-        ccx.record_translation_item_as_generated(TransItem::Static(id));
-    }
-
     unsafe {
         let _icx = push_ctxt("trans_static");
         let def_id = ccx.tcx().map.local_def_id(id);
@@ -1197,6 +1205,9 @@ pub fn trans_static(ccx: &CrateContext,
                                "thread_local") {
             llvm::set_thread_local(g, true);
         }
+
+        base::set_link_section(ccx, g, attrs);
+
         Ok(g)
     }
 }
index bfcb1ae33b3019f000314b4e25005f23f1a839aa..b8d231db40a2af48aeda9bc1061b5f3f580f20a2 100644 (file)
 use monomorphize::Instance;
 
 use partitioning::CodegenUnit;
-use collector::TransItemState;
 use trans_item::TransItem;
 use type_::{Type, TypeNames};
 use rustc::ty::subst::{Substs, VecPerParamSpace};
 use rustc::ty::{self, Ty, TyCtxt};
 use session::config::NoDebugInfo;
 use session::Session;
+use symbol_map::SymbolMap;
 use util::sha2::Sha256;
 use util::nodemap::{NodeMap, NodeSet, DefIdMap, FnvHashMap, FnvHashSet};
 
 use std::str;
 use syntax::ast;
 use syntax::parse::token::InternedString;
+use abi::FnType;
 
 pub struct Stats {
     pub n_glues_created: Cell<usize>,
     pub n_null_glues: Cell<usize>,
     pub n_real_glues: Cell<usize>,
+    pub n_fallback_instantiations: Cell<usize>,
     pub n_fns: Cell<usize>,
     pub n_monos: Cell<usize>,
     pub n_inlines: Cell<usize>,
@@ -80,11 +82,9 @@ pub struct SharedCrateContext<'a, 'tcx: 'a> {
     mir_map: &'a MirMap<'tcx>,
     mir_cache: RefCell<DefIdMap<Rc<mir::Mir<'tcx>>>>,
 
-    available_monomorphizations: RefCell<FnvHashSet<String>>,
-    available_drop_glues: RefCell<FnvHashMap<DropGlueKind<'tcx>, String>>,
     use_dll_storage_attrs: bool,
 
-    translation_items: RefCell<FnvHashMap<TransItem<'tcx>, TransItemState>>,
+    translation_items: RefCell<FnvHashSet<TransItem<'tcx>>>,
     trait_cache: RefCell<DepTrackingMap<TraitSelectionCache<'tcx>>>,
 }
 
@@ -99,7 +99,7 @@ pub struct LocalCrateContext<'tcx> {
     codegen_unit: CodegenUnit<'tcx>,
     needs_unwind_cleanup_cache: RefCell<FnvHashMap<Ty<'tcx>, bool>>,
     fn_pointer_shims: RefCell<FnvHashMap<Ty<'tcx>, ValueRef>>,
-    drop_glues: RefCell<FnvHashMap<DropGlueKind<'tcx>, ValueRef>>,
+    drop_glues: RefCell<FnvHashMap<DropGlueKind<'tcx>, (ValueRef, FnType)>>,
     /// Track mapping of external ids to local items imported for inlining
     external: RefCell<DefIdMap<Option<ast::NodeId>>>,
     /// Backwards version of the `external` map (inlined items to where they
@@ -172,6 +172,8 @@ pub struct LocalCrateContext<'tcx> {
 
     /// Depth of the current type-of computation - used to bail out
     type_of_depth: Cell<usize>,
+
+    symbol_map: Rc<SymbolMap<'tcx>>,
 }
 
 // Implement DepTrackingMapConfig for `trait_cache`
@@ -198,12 +200,13 @@ pub struct CrateContextList<'a, 'tcx: 'a> {
 impl<'a, 'tcx: 'a> CrateContextList<'a, 'tcx> {
 
     pub fn new(shared_ccx: &'a SharedCrateContext<'a, 'tcx>,
-               codegen_units: Vec<CodegenUnit<'tcx>>)
+               codegen_units: Vec<CodegenUnit<'tcx>>,
+               symbol_map: Rc<SymbolMap<'tcx>>)
                -> CrateContextList<'a, 'tcx> {
         CrateContextList {
             shared: shared_ccx,
             local_ccxs: codegen_units.into_iter().map(|codegen_unit| {
-                LocalCrateContext::new(shared_ccx, codegen_unit)
+                LocalCrateContext::new(shared_ccx, codegen_unit, symbol_map.clone())
             }).collect()
         }
     }
@@ -403,6 +406,7 @@ pub fn new(tcx: TyCtxt<'b, 'tcx, 'tcx>,
                 n_glues_created: Cell::new(0),
                 n_null_glues: Cell::new(0),
                 n_real_glues: Cell::new(0),
+                n_fallback_instantiations: Cell::new(0),
                 n_fns: Cell::new(0),
                 n_monos: Cell::new(0),
                 n_inlines: Cell::new(0),
@@ -413,10 +417,8 @@ pub fn new(tcx: TyCtxt<'b, 'tcx, 'tcx>,
             },
             check_overflow: check_overflow,
             check_drop_flag_for_sanity: check_drop_flag_for_sanity,
-            available_monomorphizations: RefCell::new(FnvHashSet()),
-            available_drop_glues: RefCell::new(FnvHashMap()),
             use_dll_storage_attrs: use_dll_storage_attrs,
-            translation_items: RefCell::new(FnvHashMap()),
+            translation_items: RefCell::new(FnvHashSet()),
             trait_cache: RefCell::new(DepTrackingMap::new(tcx.dep_graph.clone())),
         }
     }
@@ -479,7 +481,7 @@ pub fn get_mir(&self, def_id: DefId) -> Option<CachedMir<'b, 'tcx>> {
         }
     }
 
-    pub fn translation_items(&self) -> &RefCell<FnvHashMap<TransItem<'tcx>, TransItemState>> {
+    pub fn translation_items(&self) -> &RefCell<FnvHashSet<TransItem<'tcx>>> {
         &self.translation_items
     }
 
@@ -515,7 +517,8 @@ pub fn metadata_symbol_name(&self) -> String {
 
 impl<'tcx> LocalCrateContext<'tcx> {
     fn new<'a>(shared: &SharedCrateContext<'a, 'tcx>,
-               codegen_unit: CodegenUnit<'tcx>)
+               codegen_unit: CodegenUnit<'tcx>,
+               symbol_map: Rc<SymbolMap<'tcx>>)
            -> LocalCrateContext<'tcx> {
         unsafe {
             // Append ".rs" to LLVM module identifier.
@@ -574,6 +577,7 @@ fn new<'a>(shared: &SharedCrateContext<'a, 'tcx>,
                 intrinsics: RefCell::new(FnvHashMap()),
                 n_llvm_insns: Cell::new(0),
                 type_of_depth: Cell::new(0),
+                symbol_map: symbol_map,
             };
 
             let (int_type, opaque_vec_type, str_slice_ty, mut local_ccx) = {
@@ -730,7 +734,8 @@ pub fn fn_pointer_shims(&self) -> &RefCell<FnvHashMap<Ty<'tcx>, ValueRef>> {
         &self.local().fn_pointer_shims
     }
 
-    pub fn drop_glues<'a>(&'a self) -> &'a RefCell<FnvHashMap<DropGlueKind<'tcx>, ValueRef>> {
+    pub fn drop_glues<'a>(&'a self)
+                          -> &'a RefCell<FnvHashMap<DropGlueKind<'tcx>, (ValueRef, FnType)>> {
         &self.local().drop_glues
     }
 
@@ -816,14 +821,6 @@ pub fn stats<'a>(&'a self) -> &'a Stats {
         &self.shared.stats
     }
 
-    pub fn available_monomorphizations<'a>(&'a self) -> &'a RefCell<FnvHashSet<String>> {
-        &self.shared.available_monomorphizations
-    }
-
-    pub fn available_drop_glues(&self) -> &RefCell<FnvHashMap<DropGlueKind<'tcx>, String>> {
-        &self.shared.available_drop_glues
-    }
-
     pub fn int_type(&self) -> Type {
         self.local().int_type
     }
@@ -900,22 +897,12 @@ pub fn get_mir(&self, def_id: DefId) -> Option<CachedMir<'b, 'tcx>> {
         self.shared.get_mir(def_id)
     }
 
-    pub fn translation_items(&self) -> &RefCell<FnvHashMap<TransItem<'tcx>, TransItemState>> {
-        &self.shared.translation_items
+    pub fn symbol_map(&self) -> &SymbolMap<'tcx> {
+        &*self.local().symbol_map
     }
 
-    pub fn record_translation_item_as_generated(&self, cgi: TransItem<'tcx>) {
-        if self.sess().opts.debugging_opts.print_trans_items.is_none() {
-            return;
-        }
-
-        let mut codegen_items = self.translation_items().borrow_mut();
-
-        if codegen_items.contains_key(&cgi) {
-            codegen_items.insert(cgi, TransItemState::PredictedAndGenerated);
-        } else {
-            codegen_items.insert(cgi, TransItemState::NotPredictedButGenerated);
-        }
+    pub fn translation_items(&self) -> &RefCell<FnvHashSet<TransItem<'tcx>>> {
+        &self.shared.translation_items
     }
 
     /// Given the def-id of some item that has no type parameters, make
index 2b079e7dcc8d9cd483c5db3786435f2750a2dae0..0b75402486812b4706706c1363a284cdd2341ffd 100644 (file)
@@ -313,7 +313,7 @@ fn walk_pattern(cx: &CrateContext,
             }
         }
 
-        PatKind::Path(..) | PatKind::QPath(..) => {
+        PatKind::Path(..) => {
             scope_map.insert(pat.id, scope_stack.last().unwrap().scope_metadata);
         }
 
index 34dedeede98e0c6ef480df93c2374cad40164e05..b84cc028d0ced97d4166f805aefba6ec8acd2a3d 100644 (file)
@@ -1109,7 +1109,7 @@ struct StructMemberDescriptionFactory<'tcx> {
 impl<'tcx> StructMemberDescriptionFactory<'tcx> {
     fn create_member_descriptions<'a>(&self, cx: &CrateContext<'a, 'tcx>)
                                       -> Vec<MemberDescription> {
-        if let ty::VariantKind::Unit = self.variant.kind() {
+        if self.variant.kind == ty::VariantKind::Unit {
             return Vec::new();
         }
 
@@ -1126,7 +1126,7 @@ fn create_member_descriptions<'a>(&self, cx: &CrateContext<'a, 'tcx>)
         };
 
         self.variant.fields.iter().enumerate().map(|(i, f)| {
-            let name = if let ty::VariantKind::Tuple = self.variant.kind() {
+            let name = if self.variant.kind == ty::VariantKind::Tuple {
                 format!("__{}", i)
             } else {
                 f.name.to_string()
@@ -1356,7 +1356,7 @@ fn create_member_descriptions<'a>(&self, cx: &CrateContext<'a, 'tcx>)
                 // For the metadata of the wrapper struct, we need to create a
                 // MemberDescription of the struct's single field.
                 let sole_struct_member_description = MemberDescription {
-                    name: match non_null_variant.kind() {
+                    name: match non_null_variant.kind {
                         ty::VariantKind::Tuple => "__0".to_string(),
                         ty::VariantKind::Struct => {
                             non_null_variant.fields[0].name.to_string()
@@ -1524,7 +1524,7 @@ fn describe_enum_variant<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
                                            containing_scope);
 
     // Get the argument names from the enum variant info
-    let mut arg_names: Vec<_> = match variant.kind() {
+    let mut arg_names: Vec<_> = match variant.kind {
         ty::VariantKind::Unit => vec![],
         ty::VariantKind::Tuple => {
             variant.fields
index e6db695943bbee5b5ae940255b31ed3e108f2c5b..2746d3fb6b0b6788e76397504e968b60d460491f 100644 (file)
@@ -138,24 +138,34 @@ pub fn define_global(ccx: &CrateContext, name: &str, ty: Type) -> Option<ValueRe
     }
 }
 
-
 /// Declare a Rust function with an intention to define it.
 ///
 /// Use this function when you intend to define a function. This function will
 /// return panic if the name already has a definition associated with it. This
 /// can happen with #[no_mangle] or #[export_name], for example.
-pub fn define_internal_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
-                                    name: &str,
-                                    fn_type: ty::Ty<'tcx>) -> ValueRef {
+pub fn define_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
+                           name: &str,
+                           fn_type: ty::Ty<'tcx>) -> ValueRef {
     if get_defined_value(ccx, name).is_some() {
         ccx.sess().fatal(&format!("symbol `{}` already defined", name))
     } else {
-        let llfn = declare_fn(ccx, name, fn_type);
-        llvm::SetLinkage(llfn, llvm::InternalLinkage);
-        llfn
+        declare_fn(ccx, name, fn_type)
     }
 }
 
+/// Declare a Rust function with an intention to define it.
+///
+/// Use this function when you intend to define a function. This function will
+/// panic if the name already has a definition associated with it. This
+/// can happen with #[no_mangle] or #[export_name], for example.
+pub fn define_internal_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
+                                    name: &str,
+                                    fn_type: ty::Ty<'tcx>) -> ValueRef {
+    let llfn = define_fn(ccx, name, fn_type);
+    llvm::SetLinkage(llfn, llvm::InternalLinkage);
+    llfn
+}
+
 
 /// Get declared value by name.
 pub fn get_declared_value(ccx: &CrateContext, name: &str) -> Option<ValueRef> {
index 71c6cba9cc22a6866aa221368c44cf21950a94ff..b8dd7273a8331df4ac0c1fec8dbbe3572c25cf2e 100644 (file)
@@ -1695,11 +1695,13 @@ fn trans_scalar_binop<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
 }
 
 // refinement types would obviate the need for this
+#[derive(Clone, Copy)]
 enum lazy_binop_ty {
     lazy_and,
     lazy_or,
 }
 
+
 fn trans_lazy_binop<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                 binop_expr: &hir::Expr,
                                 op: lazy_binop_ty,
@@ -1717,6 +1719,17 @@ fn trans_lazy_binop<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
         return immediate_rvalue_bcx(past_lhs, lhs, binop_ty).to_expr_datumblock();
     }
 
+    // If the rhs can never be reached, don't generate code for it.
+    if let Some(cond_val) = const_to_opt_uint(lhs) {
+        match (cond_val, op) {
+            (0, lazy_and) |
+            (1, lazy_or)  => {
+                return immediate_rvalue_bcx(past_lhs, lhs, binop_ty).to_expr_datumblock();
+            }
+            _ => { /* continue */ }
+        }
+    }
+
     let join = fcx.new_id_block("join", binop_expr.id);
     let before_rhs = fcx.new_id_block("before_rhs", b.id);
 
index ac23d713d2727936f91b8cdc042b656363920960..ef7d0ea165d609d4487e50ece1e65a2869d61940 100644 (file)
 
 use std;
 
-use attributes;
-use back::symbol_names;
 use llvm;
 use llvm::{ValueRef, get_param};
 use middle::lang_items::ExchangeFreeFnLangItem;
 use rustc::ty::subst::{Substs};
 use rustc::traits;
 use rustc::ty::{self, Ty, TyCtxt, TypeFoldable};
-use abi::{Abi, FnType};
 use adt;
 use adt::GetDtorType; // for tcx.dtor_type()
 use base::*;
 use callee::{Callee, ArgVals};
 use cleanup;
 use cleanup::CleanupMethods;
-use collector;
 use common::*;
 use debuginfo::DebugLoc;
-use declare;
 use expr;
 use machine::*;
 use monomorphize;
@@ -236,48 +231,43 @@ pub fn map_ty<F>(&self, mut f: F) -> DropGlueKind<'tcx> where F: FnMut(Ty<'tcx>)
 
 fn get_drop_glue_core<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
                                 g: DropGlueKind<'tcx>) -> ValueRef {
-    debug!("make drop glue for {:?}", g);
     let g = g.map_ty(|t| get_drop_glue_type(ccx.tcx(), t));
-    debug!("drop glue type {:?}", g);
     match ccx.drop_glues().borrow().get(&g) {
-        Some(&glue) => return glue,
-        _ => { }
+        Some(&(glue, _)) => return glue,
+        None => {
+            debug!("Could not find drop glue for {:?} -- {} -- {}. \
+                    Falling back to on-demand instantiation.",
+                    g,
+                    TransItem::DropGlue(g).to_raw_string(),
+                    ccx.codegen_unit().name);
+
+            ccx.stats().n_fallback_instantiations.set(ccx.stats()
+                                                         .n_fallback_instantiations
+                                                         .get() + 1);
+        }
     }
-    let t = g.ty();
 
-    let tcx = ccx.tcx();
-    let sig = ty::FnSig {
-        inputs: vec![tcx.mk_mut_ptr(tcx.types.i8)],
-        output: ty::FnOutput::FnConverging(tcx.mk_nil()),
-        variadic: false,
-    };
-    // Create a FnType for fn(*mut i8) and substitute the real type in
-    // later - that prevents FnType from splitting fat pointers up.
-    let mut fn_ty = FnType::new(ccx, Abi::Rust, &sig, &[]);
-    fn_ty.args[0].original_ty = type_of(ccx, t).ptr_to();
-    let llfnty = fn_ty.llvm_type(ccx);
-
-    // To avoid infinite recursion, don't `make_drop_glue` until after we've
-    // added the entry to the `drop_glues` cache.
-    if let Some(old_sym) = ccx.available_drop_glues().borrow().get(&g) {
-        let llfn = declare::declare_cfn(ccx, &old_sym, llfnty);
-        ccx.drop_glues().borrow_mut().insert(g, llfn);
-        return llfn;
-    };
+    // FIXME: #34151
+    // Normally, getting here would indicate a bug in trans::collector,
+    // since it seems to have missed a translation item. When we are
+    // translating with non-MIR-based trans, however, the results of the
+    // collector are not entirely reliable since it bases its analysis
+    // on MIR. Thus, we'll instantiate the missing function on demand in
+    // this codegen unit, so that things keep working.
 
-    let suffix = match g {
-        DropGlueKind::Ty(_) => "drop",
-        DropGlueKind::TyContents(_) => "drop_contents",
-    };
+    TransItem::DropGlue(g).predefine(ccx, llvm::InternalLinkage);
+    TransItem::DropGlue(g).define(ccx);
 
-    let fn_nm = symbol_names::internal_name_from_type_and_suffix(ccx, t, suffix);
-    assert!(declare::get_defined_value(ccx, &fn_nm).is_none());
-    let llfn = declare::declare_cfn(ccx, &fn_nm, llfnty);
-    attributes::set_frame_pointer_elimination(ccx, llfn);
-    ccx.available_drop_glues().borrow_mut().insert(g, fn_nm);
-    ccx.drop_glues().borrow_mut().insert(g, llfn);
+    // Now that we made sure that the glue function is in ccx.drop_glues,
+    // give it another try
+    get_drop_glue_core(ccx, g)
+}
 
-    let _s = StatRecorder::new(ccx, format!("drop {:?}", t));
+pub fn implement_drop_glue<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
+                                     g: DropGlueKind<'tcx>) {
+    let tcx = ccx.tcx();
+    assert_eq!(g.ty(), get_drop_glue_type(tcx, g.ty()));
+    let (llfn, fn_ty) = ccx.drop_glues().borrow().get(&g).unwrap().clone();
 
     let (arena, fcx): (TypedArena<_>, FunctionContext);
     arena = TypedArena::new();
@@ -285,8 +275,6 @@ fn get_drop_glue_core<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
 
     let bcx = fcx.init(false, None);
 
-    update_linkage(ccx, llfn, None, OriginalTranslation);
-
     ccx.stats().n_glues_created.set(ccx.stats().n_glues_created.get() + 1);
     // All glue functions take values passed *by alias*; this is a
     // requirement since in many contexts glue is invoked indirectly and
@@ -298,10 +286,9 @@ fn get_drop_glue_core<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
 
     let bcx = make_drop_glue(bcx, get_param(llfn, 0), g);
     fcx.finish(bcx, DebugLoc::None);
-
-    llfn
 }
 
+
 fn trans_struct_drop_flag<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
                                       t: Ty<'tcx>,
                                       struct_data: ValueRef)
@@ -494,11 +481,6 @@ pub fn size_and_align_of_dst<'blk, 'tcx>(bcx: &BlockAndBuilder<'blk, 'tcx>,
 
 fn make_drop_glue<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, v0: ValueRef, g: DropGlueKind<'tcx>)
                               -> Block<'blk, 'tcx> {
-    if collector::collecting_debug_information(bcx.ccx().shared()) {
-        bcx.ccx()
-           .record_translation_item_as_generated(TransItem::DropGlue(g));
-    }
-
     let t = g.ty();
 
     let skip_dtor = match g { DropGlueKind::Ty(_) => false, DropGlueKind::TyContents(_) => true };
index af175fbf88256eecb57cebe6f0863d909d94cc0f..4077b894d62d4456a76dc16f1baf83ab83b78f16 100644 (file)
@@ -8,13 +8,11 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use llvm::{AvailableExternallyLinkage, InternalLinkage, SetLinkage};
 use middle::cstore::{FoundAst, InlinedItem};
 use rustc::hir::def_id::DefId;
-use rustc::ty::subst::Substs;
-use base::{push_ctxt, trans_item, trans_fn};
-use callee::Callee;
+use base::push_ctxt;
 use common::*;
+use monomorphize::Instance;
 
 use rustc::dep_graph::DepNode;
 use rustc::hir;
@@ -52,30 +50,6 @@ fn instantiate_inline(ccx: &CrateContext, fn_id: DefId) -> Option<DefId> {
             ccx.external_srcs().borrow_mut().insert(item.id, fn_id);
 
             ccx.stats().n_inlines.set(ccx.stats().n_inlines.get() + 1);
-            trans_item(ccx, item);
-
-            if let hir::ItemFn(_, _, _, _, ref generics, _) = item.node {
-                // Generics have no symbol, so they can't be given any linkage.
-                if !generics.is_type_parameterized() {
-                    let linkage = if ccx.sess().opts.cg.codegen_units == 1 {
-                        // We could use AvailableExternallyLinkage here,
-                        // but InternalLinkage allows LLVM to optimize more
-                        // aggressively (at the cost of sometimes
-                        // duplicating code).
-                        InternalLinkage
-                    } else {
-                        // With multiple compilation units, duplicated code
-                        // is more of a problem.  Also, `codegen_units > 1`
-                        // means the user is okay with losing some
-                        // performance.
-                        AvailableExternallyLinkage
-                    };
-                    let empty_substs = tcx.mk_substs(Substs::empty());
-                    let def_id = tcx.map.local_def_id(item.id);
-                    let llfn = Callee::def(ccx, def_id, empty_substs).reify(ccx).val;
-                    SetLinkage(llfn, linkage);
-                }
-            }
 
             item.id
         }
@@ -135,35 +109,12 @@ fn instantiate_inline(ccx: &CrateContext, fn_id: DefId) -> Option<DefId> {
             // don't.
             trait_item.id
         }
-        FoundAst::Found(&InlinedItem::ImplItem(impl_did, ref impl_item)) => {
+        FoundAst::Found(&InlinedItem::ImplItem(_, ref impl_item)) => {
             ccx.external().borrow_mut().insert(fn_id, Some(impl_item.id));
             ccx.external_srcs().borrow_mut().insert(impl_item.id, fn_id);
 
             ccx.stats().n_inlines.set(ccx.stats().n_inlines.get() + 1);
 
-            // Translate monomorphic impl methods immediately.
-            if let hir::ImplItemKind::Method(ref sig, ref body) = impl_item.node {
-                let impl_tpt = tcx.lookup_item_type(impl_did);
-                if impl_tpt.generics.types.is_empty() &&
-                        sig.generics.ty_params.is_empty() {
-                    let def_id = tcx.map.local_def_id(impl_item.id);
-                    let empty_substs = ccx.empty_substs_for_def_id(def_id);
-                    let llfn = Callee::def(ccx, def_id, empty_substs).reify(ccx).val;
-                    trans_fn(ccx,
-                             &sig.decl,
-                             body,
-                             llfn,
-                             empty_substs,
-                             impl_item.id);
-                    // See linkage comments on items.
-                    if ccx.sess().opts.cg.codegen_units == 1 {
-                        SetLinkage(llfn, InternalLinkage);
-                    } else {
-                        SetLinkage(llfn, AvailableExternallyLinkage);
-                    }
-                }
-            }
-
             impl_item.id
         }
     };
@@ -184,3 +135,12 @@ pub fn get_local_instance(ccx: &CrateContext, fn_id: DefId)
 pub fn maybe_instantiate_inline(ccx: &CrateContext, fn_id: DefId) -> DefId {
     get_local_instance(ccx, fn_id).unwrap_or(fn_id)
 }
+
+pub fn maybe_inline_instance<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
+                                       instance: Instance<'tcx>) -> Instance<'tcx> {
+    let def_id = maybe_instantiate_inline(ccx, instance.def);
+    Instance {
+        def: def_id,
+        substs: instance.substs
+    }
+}
index 9cb5d8b6ad62a2e43781bb64b3b7806f5fabe425..fa0a1fdc37523dbff9f012d6a0bb33f82baf93a1 100644 (file)
@@ -122,6 +122,7 @@ pub mod back {
 mod mir;
 mod monomorphize;
 mod partitioning;
+mod symbol_map;
 mod symbol_names_test;
 mod trans_item;
 mod tvec;
@@ -129,8 +130,9 @@ pub mod back {
 mod type_of;
 mod value;
 
-#[derive(Copy, Clone)]
+#[derive(Clone)]
 pub struct ModuleTranslation {
+    pub name: String,
     pub llcx: llvm::ContextRef,
     pub llmod: llvm::ModuleRef,
 }
index 446ac91b1f58086d990aea8eeb3154542a0341de..270033be9375c20029f763de31b8da3816bc930d 100644 (file)
@@ -197,10 +197,13 @@ pub fn trans_consume(&mut self,
                         (OperandValue::Pair(a, b),
                          &mir::ProjectionElem::Field(ref f, ty)) => {
                             let llval = [a, b][f.index()];
-                            return OperandRef {
+                            let op = OperandRef {
                                 val: OperandValue::Immediate(llval),
                                 ty: bcx.monomorphize(&ty)
                             };
+
+                            // Handle nested pairs.
+                            return op.unpack_if_pair(bcx);
                         }
                         _ => {}
                     }
index ab859b88a85972d55200ba32260199974fa94422..00c0e91103500d21a26f02eff544778948f2befd 100644 (file)
@@ -17,7 +17,6 @@
 use rustc::ty::{self, Ty, TypeFoldable, TyCtxt};
 use attributes;
 use base::{push_ctxt};
-use base::trans_fn;
 use base;
 use common::*;
 use declare;
 
 use rustc::hir;
 
-use syntax::attr;
 use errors;
 
 use std::fmt;
+use trans_item::TransItem;
 
 pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
                                 fn_id: DefId,
                                 psubsts: &'tcx subst::Substs<'tcx>)
                                 -> (ValueRef, Ty<'tcx>) {
     debug!("monomorphic_fn(fn_id={:?}, real_substs={:?})", fn_id, psubsts);
-
     assert!(!psubsts.types.needs_infer() && !psubsts.types.has_param_types());
 
     let _icx = push_ctxt("monomorphic_fn");
@@ -53,6 +51,8 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
     if let Some(&val) = ccx.instances().borrow().get(&instance) {
         debug!("leaving monomorphic fn {:?}", instance);
         return (val, mono_ty);
+    } else {
+        assert!(!ccx.codegen_unit().items.contains_key(&TransItem::Fn(instance)));
     }
 
     debug!("monomorphic_fn({:?})", instance);
@@ -84,9 +84,10 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
         monomorphizing.insert(fn_id, depth + 1);
     }
 
-    let symbol = instance.symbol_name(ccx.shared());
+    let symbol = ccx.symbol_map().get_or_compute(ccx.shared(),
+                                                 TransItem::Fn(instance));
 
-    debug!("monomorphize_fn mangled to {}", symbol);
+    debug!("monomorphize_fn mangled to {}", &symbol);
     assert!(declare::get_defined_value(ccx, &symbol).is_none());
 
     // FIXME(nagisa): perhaps needs a more fine grained selection?
@@ -109,33 +110,35 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
         });
     match map_node {
         hir_map::NodeItem(&hir::Item {
-            ref attrs, node: hir::ItemFn(ref decl, _, _, _, _, ref body), ..
-        }) |
-        hir_map::NodeTraitItem(&hir::TraitItem {
-            ref attrs, node: hir::MethodTraitItem(
-                hir::MethodSig { ref decl, .. }, Some(ref body)), ..
+            ref attrs,
+            node: hir::ItemFn(..), ..
         }) |
         hir_map::NodeImplItem(&hir::ImplItem {
             ref attrs, node: hir::ImplItemKind::Method(
-                hir::MethodSig { ref decl, .. }, ref body), ..
+                hir::MethodSig { .. }, _), ..
+        }) |
+        hir_map::NodeTraitItem(&hir::TraitItem {
+            ref attrs, node: hir::MethodTraitItem(
+                hir::MethodSig { .. }, Some(_)), ..
         }) => {
-            attributes::from_fn_attrs(ccx, attrs, lldecl);
-
-            let is_first = !ccx.available_monomorphizations().borrow()
-                                                             .contains(&symbol);
-            if is_first {
-                ccx.available_monomorphizations().borrow_mut().insert(symbol.clone());
-            }
+            let trans_item = TransItem::Fn(instance);
 
-            let trans_everywhere = attr::requests_inline(attrs);
-            if trans_everywhere || is_first {
-                let origin = if is_first { base::OriginalTranslation } else { base::InlinedCopy };
-                base::update_linkage(ccx, lldecl, None, origin);
-                trans_fn(ccx, decl, body, lldecl, psubsts, fn_node_id);
-            } else {
-                // We marked the value as using internal linkage earlier, but that is illegal for
-                // declarations, so switch back to external linkage.
+            if ccx.shared().translation_items().borrow().contains(&trans_item) {
+                attributes::from_fn_attrs(ccx, attrs, lldecl);
                 llvm::SetLinkage(lldecl, llvm::ExternalLinkage);
+            } else {
+                // FIXME: #34151
+                // Normally, getting here would indicate a bug in trans::collector,
+                // since it seems to have missed a translation item. When we are
+                // translating with non-MIR based trans, however, the results of
+                // the collector are not entirely reliable since it bases its
+                // analysis on MIR. Thus, we'll instantiate the missing function
+                // privately in this codegen unit, so that things keep working.
+                ccx.stats().n_fallback_instantiations.set(ccx.stats()
+                                                             .n_fallback_instantiations
+                                                             .get() + 1);
+                trans_item.predefine(ccx, llvm::InternalLinkage);
+                trans_item.define(ccx);
             }
         }
 
index 2ded643ef4fdd309da56c954925557c879a8f2b1..8073359ede87ebc058689e317e484ac862390aac 100644 (file)
 use monomorphize;
 use rustc::hir::def_id::DefId;
 use rustc::hir::map::DefPathData;
+use rustc::session::config::NUMBERED_CODEGEN_UNIT_MARKER;
 use rustc::ty::TyCtxt;
 use rustc::ty::item_path::characteristic_def_id_of_type;
+use std::cmp::Ordering;
+use symbol_map::SymbolMap;
+use syntax::ast::NodeId;
 use syntax::parse::token::{self, InternedString};
 use trans_item::TransItem;
-use util::nodemap::{FnvHashMap, FnvHashSet};
-
-pub struct CodegenUnit<'tcx> {
-    pub name: InternedString,
-    pub items: FnvHashMap<TransItem<'tcx>, llvm::Linkage>,
-}
+use util::nodemap::{FnvHashMap, FnvHashSet, NodeSet};
 
 pub enum PartitioningStrategy {
     /// Generate one codegen unit per source-level module.
@@ -140,25 +139,95 @@ pub enum PartitioningStrategy {
     FixedUnitCount(usize)
 }
 
+pub struct CodegenUnit<'tcx> {
+    pub name: InternedString,
+    pub items: FnvHashMap<TransItem<'tcx>, llvm::Linkage>,
+}
+
+impl<'tcx> CodegenUnit<'tcx> {
+    pub fn items_in_deterministic_order(&self,
+                                        tcx: TyCtxt,
+                                        symbol_map: &SymbolMap)
+                                        -> Vec<(TransItem<'tcx>, llvm::Linkage)> {
+        let mut items: Vec<(TransItem<'tcx>, llvm::Linkage)> =
+            self.items.iter().map(|(item, linkage)| (*item, *linkage)).collect();
+
+        // The codegen tests rely on items being process in the same order as
+        // they appear in the file, so for local items, we sort by node_id first
+        items.sort_by(|&(trans_item1, _), &(trans_item2, _)| {
+            let node_id1 = local_node_id(tcx, trans_item1);
+            let node_id2 = local_node_id(tcx, trans_item2);
+
+            match (node_id1, node_id2) {
+                (None, None) => {
+                    let symbol_name1 = symbol_map.get(trans_item1).unwrap();
+                    let symbol_name2 = symbol_map.get(trans_item2).unwrap();
+                    symbol_name1.cmp(symbol_name2)
+                }
+                // In the following two cases we can avoid looking up the symbol
+                (None, Some(_)) => Ordering::Less,
+                (Some(_), None) => Ordering::Greater,
+                (Some(node_id1), Some(node_id2)) => {
+                    let ordering = node_id1.cmp(&node_id2);
+
+                    if ordering != Ordering::Equal {
+                        return ordering;
+                    }
+
+                    let symbol_name1 = symbol_map.get(trans_item1).unwrap();
+                    let symbol_name2 = symbol_map.get(trans_item2).unwrap();
+                    symbol_name1.cmp(symbol_name2)
+                }
+            }
+        });
+
+        return items;
+
+        fn local_node_id(tcx: TyCtxt, trans_item: TransItem) -> Option<NodeId> {
+            match trans_item {
+                TransItem::Fn(instance) => {
+                    tcx.map.as_local_node_id(instance.def)
+                }
+                TransItem::Static(node_id) => Some(node_id),
+                TransItem::DropGlue(_) => None,
+            }
+        }
+    }
+}
+
+
 // Anything we can't find a proper codegen unit for goes into this.
 const FALLBACK_CODEGEN_UNIT: &'static str = "__rustc_fallback_codegen_unit";
 
 pub fn partition<'a, 'tcx, I>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                               trans_items: I,
                               strategy: PartitioningStrategy,
-                              inlining_map: &InliningMap<'tcx>)
+                              inlining_map: &InliningMap<'tcx>,
+                              reachable: &NodeSet)
                               -> Vec<CodegenUnit<'tcx>>
     where I: Iterator<Item = TransItem<'tcx>>
 {
+    if let PartitioningStrategy::FixedUnitCount(1) = strategy {
+        // If there is only a single codegen-unit, we can use a very simple
+        // scheme and don't have to bother with doing much analysis.
+        return vec![single_codegen_unit(tcx, trans_items, reachable)];
+    }
+
     // In the first step, we place all regular translation items into their
     // respective 'home' codegen unit. Regular translation items are all
     // functions and statics defined in the local crate.
-    let mut initial_partitioning = place_root_translation_items(tcx, trans_items);
+    let mut initial_partitioning = place_root_translation_items(tcx,
+                                                                trans_items,
+                                                                reachable);
+
+    debug_dump(tcx, "INITIAL PARTITONING:", initial_partitioning.codegen_units.iter());
 
     // If the partitioning should produce a fixed count of codegen units, merge
     // until that count is reached.
     if let PartitioningStrategy::FixedUnitCount(count) = strategy {
         merge_codegen_units(&mut initial_partitioning, count, &tcx.crate_name[..]);
+
+        debug_dump(tcx, "POST MERGING:", initial_partitioning.codegen_units.iter());
     }
 
     // In the next step, we use the inlining map to determine which addtional
@@ -167,7 +236,16 @@ pub fn partition<'a, 'tcx, I>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
     // local functions the definition of which is marked with #[inline].
     let post_inlining = place_inlined_translation_items(initial_partitioning,
                                                         inlining_map);
-    post_inlining.0
+
+    debug_dump(tcx, "POST INLINING:", post_inlining.0.iter());
+
+    // Finally, sort by codegen unit name, so that we get deterministic results
+    let mut result = post_inlining.0;
+    result.sort_by(|cgu1, cgu2| {
+        (&cgu1.name[..]).cmp(&cgu2.name[..])
+    });
+
+    result
 }
 
 struct PreInliningPartitioning<'tcx> {
@@ -178,7 +256,8 @@ struct PreInliningPartitioning<'tcx> {
 struct PostInliningPartitioning<'tcx>(Vec<CodegenUnit<'tcx>>);
 
 fn place_root_translation_items<'a, 'tcx, I>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                                             trans_items: I)
+                                             trans_items: I,
+                                             _reachable: &NodeSet)
                                              -> PreInliningPartitioning<'tcx>
     where I: Iterator<Item = TransItem<'tcx>>
 {
@@ -186,15 +265,11 @@ fn place_root_translation_items<'a, 'tcx, I>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
     let mut codegen_units = FnvHashMap();
 
     for trans_item in trans_items {
-        let is_root = match trans_item {
-            TransItem::Static(..) => true,
-            TransItem::DropGlue(..) => false,
-            TransItem::Fn(_) => !trans_item.is_from_extern_crate(),
-        };
+        let is_root = !trans_item.is_instantiated_only_on_demand();
 
         if is_root {
             let characteristic_def_id = characteristic_def_id_of_trans_item(tcx, trans_item);
-            let is_volatile = trans_item.is_lazily_instantiated();
+            let is_volatile = trans_item.is_generic_fn();
 
             let codegen_unit_name = match characteristic_def_id {
                 Some(def_id) => compute_codegen_unit_name(tcx, def_id, is_volatile),
@@ -218,7 +293,18 @@ fn place_root_translation_items<'a, 'tcx, I>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                         TransItem::Static(..) => llvm::ExternalLinkage,
                         TransItem::DropGlue(..) => unreachable!(),
                         // Is there any benefit to using ExternalLinkage?:
-                        TransItem::Fn(..) => llvm::WeakODRLinkage,
+                        TransItem::Fn(ref instance) => {
+                            if instance.substs.types.is_empty() {
+                                // This is a non-generic functions, we always
+                                // make it visible externally on the chance that
+                                // it might be used in another codegen unit.
+                                llvm::ExternalLinkage
+                            } else {
+                                // In the current setup, generic functions cannot
+                                // be roots.
+                                unreachable!()
+                            }
+                        }
                     }
                 }
             };
@@ -258,7 +344,7 @@ fn merge_codegen_units<'tcx>(initial_partitioning: &mut PreInliningPartitioning<
     // translation items in a given unit. This could be improved on.
     while codegen_units.len() > target_cgu_count {
         // Sort small cgus to the back
-        codegen_units.as_mut_slice().sort_by_key(|cgu| -(cgu.items.len() as i64));
+        codegen_units.sort_by_key(|cgu| -(cgu.items.len() as i64));
         let smallest = codegen_units.pop().unwrap();
         let second_smallest = codegen_units.last_mut().unwrap();
 
@@ -281,10 +367,6 @@ fn merge_codegen_units<'tcx>(initial_partitioning: &mut PreInliningPartitioning<
             items: FnvHashMap()
         });
     }
-
-    fn numbered_codegen_unit_name(crate_name: &str, index: usize) -> InternedString {
-        token::intern_and_get_ident(&format!("{}.{}", crate_name, index)[..])
-    }
 }
 
 fn place_inlined_translation_items<'tcx>(initial_partitioning: PreInliningPartitioning<'tcx>,
@@ -309,20 +391,30 @@ fn place_inlined_translation_items<'tcx>(initial_partitioning: PreInliningPartit
             if let Some(linkage) = codegen_unit.items.get(&trans_item) {
                 // This is a root, just copy it over
                 new_codegen_unit.items.insert(trans_item, *linkage);
+            } else if initial_partitioning.roots.contains(&trans_item) {
+                // This item will be instantiated in some other codegen unit,
+                // so we just add it here with AvailableExternallyLinkage
+                // FIXME(mw): I have not seen it happening yet but having
+                //            available_externally here could potentially lead
+                //            to the same problem with exception handling tables
+                //            as in the case below.
+                new_codegen_unit.items.insert(trans_item,
+                                              llvm::AvailableExternallyLinkage);
+            } else if trans_item.is_from_extern_crate() && !trans_item.is_generic_fn() {
+                // FIXME(mw): It would be nice if we could mark these as
+                // `AvailableExternallyLinkage`, since they should have
+                // been instantiated in the extern crate. But this
+                // sometimes leads to crashes on Windows because LLVM
+                // does not handle exception handling table instantiation
+                // reliably in that case.
+                new_codegen_unit.items.insert(trans_item, llvm::InternalLinkage);
             } else {
-                if initial_partitioning.roots.contains(&trans_item) {
-                    // This item will be instantiated in some other codegen unit,
-                    // so we just add it here with AvailableExternallyLinkage
-                    new_codegen_unit.items.insert(trans_item,
-                                                  llvm::AvailableExternallyLinkage);
-                } else {
-                    // We can't be sure if this will also be instantiated
-                    // somewhere else, so we add an instance here with
-                    // LinkOnceODRLinkage. That way the item can be discarded if
-                    // it's not needed (inlined) after all.
-                    new_codegen_unit.items.insert(trans_item,
-                                                  llvm::LinkOnceODRLinkage);
-                }
+                assert!(trans_item.is_instantiated_only_on_demand());
+                // We can't be sure if this will also be instantiated
+                // somewhere else, so we add an instance here with
+                // InternalLinkage so we don't get any conflicts.
+                new_codegen_unit.items.insert(trans_item,
+                                              llvm::InternalLinkage);
             }
         }
 
@@ -410,3 +502,93 @@ fn compute_codegen_unit_name<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
 
     return token::intern_and_get_ident(&mod_path[..]);
 }
+
+fn single_codegen_unit<'a, 'tcx, I>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+                                    trans_items: I,
+                                    reachable: &NodeSet)
+                                    -> CodegenUnit<'tcx>
+    where I: Iterator<Item = TransItem<'tcx>>
+{
+    let mut items = FnvHashMap();
+
+    for trans_item in trans_items {
+        let linkage = trans_item.explicit_linkage(tcx).unwrap_or_else(|| {
+            match trans_item {
+                TransItem::Static(node_id) => {
+                    if reachable.contains(&node_id) {
+                        llvm::ExternalLinkage
+                    } else {
+                        llvm::PrivateLinkage
+                    }
+                }
+                TransItem::DropGlue(_) => {
+                    llvm::InternalLinkage
+                }
+                TransItem::Fn(instance) => {
+                    if trans_item.is_generic_fn() {
+                        // FIXME(mw): Assigning internal linkage to all
+                        // monomorphizations is potentially a waste of space
+                        // since monomorphizations could be shared between
+                        // crates. The main reason for making them internal is
+                        // a limitation in MingW's binutils that cannot deal
+                        // with COFF object that have more than 2^15 sections,
+                        // which is something that can happen for large programs
+                        // when every function gets put into its own COMDAT
+                        // section.
+                        llvm::InternalLinkage
+                    } else if trans_item.is_from_extern_crate() {
+                        // FIXME(mw): It would be nice if we could mark these as
+                        // `AvailableExternallyLinkage`, since they should have
+                        // been instantiated in the extern crate. But this
+                        // sometimes leads to crashes on Windows because LLVM
+                        // does not handle exception handling table instantiation
+                        // reliably in that case.
+                        llvm::InternalLinkage
+                    } else if reachable.contains(&tcx.map
+                                                     .as_local_node_id(instance.def)
+                                                     .unwrap()) {
+                        llvm::ExternalLinkage
+                    } else {
+                        // Functions that are not visible outside this crate can
+                        // be marked as internal.
+                        llvm::InternalLinkage
+                    }
+                }
+            }
+        });
+
+        items.insert(trans_item, linkage);
+    }
+
+    CodegenUnit {
+        name: numbered_codegen_unit_name(&tcx.crate_name[..], 0),
+        items: items
+    }
+}
+
+fn numbered_codegen_unit_name(crate_name: &str, index: usize) -> InternedString {
+    token::intern_and_get_ident(&format!("{}{}{}",
+        crate_name,
+        NUMBERED_CODEGEN_UNIT_MARKER,
+        index)[..])
+}
+
+fn debug_dump<'a, 'b, 'tcx, I>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+                               label: &str,
+                               cgus: I)
+    where I: Iterator<Item=&'b CodegenUnit<'tcx>>,
+          'tcx: 'a + 'b
+{
+    if cfg!(debug_assertions) {
+        debug!("{}", label);
+        for cgu in cgus {
+            debug!("CodegenUnit {}:", cgu.name);
+
+            for (trans_item, linkage) in &cgu.items {
+                debug!(" - {} [{:?}]", trans_item.to_string(tcx), linkage);
+            }
+
+            debug!("");
+        }
+    }
+}
diff --git a/src/librustc_trans/symbol_map.rs b/src/librustc_trans/symbol_map.rs
new file mode 100644 (file)
index 0000000..3faaa08
--- /dev/null
@@ -0,0 +1,128 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use context::SharedCrateContext;
+use monomorphize::Instance;
+use rustc::ty::TyCtxt;
+use std::borrow::Cow;
+use syntax::codemap::Span;
+use trans_item::TransItem;
+use util::nodemap::FnvHashMap;
+
+// In the SymbolMap we collect the symbol names of all translation items of
+// the current crate. This map exists as a performance optimization. Symbol
+// names of translation items are deterministic and fully defined by the item.
+// Thus they could also always be recomputed if needed.
+
+pub struct SymbolMap<'tcx> {
+    index: FnvHashMap<TransItem<'tcx>, (usize, usize)>,
+    arena: String,
+}
+
+impl<'tcx> SymbolMap<'tcx> {
+
+    pub fn build<'a, I>(scx: &SharedCrateContext<'a, 'tcx>,
+                        trans_items: I)
+                        -> SymbolMap<'tcx>
+        where I: Iterator<Item=TransItem<'tcx>>
+    {
+        // Check for duplicate symbol names
+        let mut symbols: Vec<_> = trans_items.map(|trans_item| {
+            (trans_item, trans_item.compute_symbol_name(scx))
+        }).collect();
+
+        (&mut symbols[..]).sort_by(|&(_, ref sym1), &(_, ref sym2)|{
+            sym1.cmp(sym2)
+        });
+
+        for pair in (&symbols[..]).windows(2) {
+            let sym1 = &pair[0].1;
+            let sym2 = &pair[1].1;
+
+            if *sym1 == *sym2 {
+                let trans_item1 = pair[0].0;
+                let trans_item2 = pair[1].0;
+
+                let span1 = get_span(scx.tcx(), trans_item1);
+                let span2 = get_span(scx.tcx(), trans_item2);
+
+                // Deterministically select one of the spans for error reporting
+                let span = match (span1, span2) {
+                    (Some(span1), Some(span2)) => {
+                        Some(if span1.lo.0 > span2.lo.0 {
+                            span1
+                        } else {
+                            span2
+                        })
+                    }
+                    (Some(span), None) |
+                    (None, Some(span)) => Some(span),
+                    _ => None
+                };
+
+                let error_message = format!("symbol `{}` is already defined", sym1);
+
+                if let Some(span) = span {
+                    scx.sess().span_fatal(span, &error_message)
+                } else {
+                    scx.sess().fatal(&error_message)
+                }
+            }
+        }
+
+        let mut symbol_map = SymbolMap {
+            index: FnvHashMap(),
+            arena: String::with_capacity(1024),
+        };
+
+        for (trans_item, symbol) in symbols {
+            let start_index = symbol_map.arena.len();
+            symbol_map.arena.push_str(&symbol[..]);
+            let end_index = symbol_map.arena.len();
+            let prev_entry = symbol_map.index.insert(trans_item,
+                                                     (start_index, end_index));
+            if prev_entry.is_some() {
+                bug!("TransItem encountered twice?")
+            }
+        }
+
+        fn get_span<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+                              trans_item: TransItem<'tcx>) -> Option<Span> {
+            match trans_item {
+                TransItem::Fn(Instance { def, .. }) => {
+                    tcx.map.as_local_node_id(def)
+                }
+                TransItem::Static(node_id) => Some(node_id),
+                TransItem::DropGlue(_) => None,
+            }.map(|node_id| {
+                tcx.map.span(node_id)
+            })
+        }
+
+        symbol_map
+    }
+
+    pub fn get(&self, trans_item: TransItem<'tcx>) -> Option<&str> {
+        self.index.get(&trans_item).map(|&(start_index, end_index)| {
+            &self.arena[start_index .. end_index]
+        })
+    }
+
+    pub fn get_or_compute<'map, 'scx>(&'map self,
+                                      scx: &SharedCrateContext<'scx, 'tcx>,
+                                      trans_item: TransItem<'tcx>)
+                                      -> Cow<'map, str> {
+        if let Some(sym) = self.get(trans_item) {
+            Cow::from(sym)
+        } else {
+            Cow::from(trans_item.compute_symbol_name(scx))
+        }
+    }
+}
index 11e9e9f3204a28a3b9e0860e3434596a9dbfcf61..9a7fe54e0d9f5a9f3dc55cfb96e9efc8af1294e2 100644 (file)
 use syntax::ast;
 use syntax::attr::AttrMetaMethods;
 
-use common::CrateContext;
+use common::SharedCrateContext;
 use monomorphize::Instance;
 
 const SYMBOL_NAME: &'static str = "rustc_symbol_name";
 const ITEM_PATH: &'static str = "rustc_item_path";
 
-pub fn report_symbol_names(ccx: &CrateContext) {
+pub fn report_symbol_names(scx: &SharedCrateContext) {
     // if the `rustc_attrs` feature is not enabled, then the
     // attributes we are interested in cannot be present anyway, so
     // skip the walk.
-    let tcx = ccx.tcx();
+    let tcx = scx.tcx();
     if !tcx.sess.features.borrow().rustc_attrs {
         return;
     }
 
     let _ignore = tcx.dep_graph.in_ignore();
-    let mut visitor = SymbolNamesTest { ccx: ccx };
+    let mut visitor = SymbolNamesTest { scx: scx };
     tcx.map.krate().visit_all_items(&mut visitor);
 }
 
 struct SymbolNamesTest<'a, 'tcx:'a> {
-    ccx: &'a CrateContext<'a, 'tcx>,
+    scx: &'a SharedCrateContext<'a, 'tcx>,
 }
 
 impl<'a, 'tcx> SymbolNamesTest<'a, 'tcx> {
     fn process_attrs(&mut self,
                      node_id: ast::NodeId) {
-        let tcx = self.ccx.tcx();
+        let tcx = self.scx.tcx();
         let def_id = tcx.map.local_def_id(node_id);
         for attr in tcx.get_attrs(def_id).iter() {
             if attr.check_name(SYMBOL_NAME) {
                 // for now, can only use on monomorphic names
-                let instance = Instance::mono(self.ccx.shared(), def_id);
-                let name = instance.symbol_name(self.ccx.shared());
+                let instance = Instance::mono(self.scx, def_id);
+                let name = instance.symbol_name(self.scx);
                 tcx.sess.span_err(attr.span, &format!("symbol-name({})", name));
             } else if attr.check_name(ITEM_PATH) {
                 let path = tcx.item_path_str(def_id);
index d7c5c41a156ba4c239780301a87881fc42087953..b7b18b2631bee91f5148405b28713265fa0b9758 100644 (file)
 //! item-path. This is used for unit testing the code that generates
 //! paths etc in all kinds of annoying scenarios.
 
-use base::llvm_linkage_by_name;
+use attributes;
+use base;
+use consts;
+use context::{CrateContext, SharedCrateContext};
+use declare;
 use glue::DropGlueKind;
 use llvm;
-use monomorphize::Instance;
+use monomorphize::{self, Instance};
+use inline;
 use rustc::hir;
+use rustc::hir::map as hir_map;
 use rustc::hir::def_id::DefId;
-use rustc::ty::{self, Ty, TyCtxt};
+use rustc::ty::{self, Ty, TyCtxt, TypeFoldable};
 use rustc::ty::subst;
 use std::hash::{Hash, Hasher};
 use syntax::ast::{self, NodeId};
-use syntax::attr;
+use syntax::{attr,errors};
 use syntax::parse::token;
+use type_of;
+use glue;
+use abi::{Abi, FnType};
+use back::symbol_names;
 
 #[derive(PartialEq, Eq, Clone, Copy, Debug)]
 pub enum TransItem<'tcx> {
@@ -54,6 +64,314 @@ fn hash<H: Hasher>(&self, s: &mut H) {
     }
 }
 
+impl<'a, 'tcx> TransItem<'tcx> {
+
+    pub fn define(&self, ccx: &CrateContext<'a, 'tcx>) {
+
+        debug!("BEGIN IMPLEMENTING '{} ({})' in cgu {}",
+                  self.to_string(ccx.tcx()),
+                  self.to_raw_string(),
+                  ccx.codegen_unit().name);
+
+        match *self {
+            TransItem::Static(node_id) => {
+                let item = ccx.tcx().map.expect_item(node_id);
+                if let hir::ItemStatic(_, m, ref expr) = item.node {
+                    match consts::trans_static(&ccx, m, expr, item.id, &item.attrs) {
+                        Ok(_) => { /* Cool, everything's alright. */ },
+                        Err(err) => ccx.tcx().sess.span_fatal(expr.span, &err.description()),
+                    };
+                } else {
+                    span_bug!(item.span, "Mismatch between hir::Item type and TransItem type")
+                }
+            }
+            TransItem::Fn(instance) => {
+                base::trans_instance(&ccx, instance);
+            }
+            TransItem::DropGlue(dg) => {
+                glue::implement_drop_glue(&ccx, dg);
+            }
+        }
+
+        debug!("END IMPLEMENTING '{} ({})' in cgu {}",
+               self.to_string(ccx.tcx()),
+               self.to_raw_string(),
+               ccx.codegen_unit().name);
+    }
+
+    pub fn predefine(&self,
+                     ccx: &CrateContext<'a, 'tcx>,
+                     linkage: llvm::Linkage) {
+        debug!("BEGIN PREDEFINING '{} ({})' in cgu {}",
+               self.to_string(ccx.tcx()),
+               self.to_raw_string(),
+               ccx.codegen_unit().name);
+
+        let symbol_name = ccx.symbol_map()
+                             .get_or_compute(ccx.shared(), *self);
+
+        debug!("symbol {}", &symbol_name);
+
+        match *self {
+            TransItem::Static(node_id) => {
+                TransItem::predefine_static(ccx, node_id, linkage, &symbol_name);
+            }
+            TransItem::Fn(instance) => {
+                TransItem::predefine_fn(ccx, instance, linkage, &symbol_name);
+            }
+            TransItem::DropGlue(dg) => {
+                TransItem::predefine_drop_glue(ccx, dg, linkage, &symbol_name);
+            }
+        }
+
+        debug!("END PREDEFINING '{} ({})' in cgu {}",
+               self.to_string(ccx.tcx()),
+               self.to_raw_string(),
+               ccx.codegen_unit().name);
+    }
+
+    fn predefine_static(ccx: &CrateContext<'a, 'tcx>,
+                        node_id: ast::NodeId,
+                        linkage: llvm::Linkage,
+                        symbol_name: &str) {
+        let def_id = ccx.tcx().map.local_def_id(node_id);
+        let ty = ccx.tcx().lookup_item_type(def_id).ty;
+        let llty = type_of::type_of(ccx, ty);
+
+        match ccx.tcx().map.get(node_id) {
+            hir::map::NodeItem(&hir::Item {
+                span, node: hir::ItemStatic(..), ..
+            }) => {
+                let g = declare::define_global(ccx, symbol_name, llty).unwrap_or_else(|| {
+                    ccx.sess().span_fatal(span,
+                        &format!("symbol `{}` is already defined", symbol_name))
+                });
+
+                llvm::SetLinkage(g, linkage);
+            }
+
+            item => bug!("predefine_static: expected static, found {:?}", item)
+        }
+    }
+
+    fn predefine_fn(ccx: &CrateContext<'a, 'tcx>,
+                    instance: Instance<'tcx>,
+                    linkage: llvm::Linkage,
+                    symbol_name: &str) {
+        assert!(!instance.substs.types.needs_infer() &&
+                !instance.substs.types.has_param_types());
+
+        let instance = inline::maybe_inline_instance(ccx, instance);
+
+        let item_ty = ccx.tcx().lookup_item_type(instance.def).ty;
+        let item_ty = ccx.tcx().erase_regions(&item_ty);
+        let mono_ty = monomorphize::apply_param_substs(ccx.tcx(), instance.substs, &item_ty);
+
+        let fn_node_id = ccx.tcx().map.as_local_node_id(instance.def).unwrap();
+        let map_node = errors::expect(
+            ccx.sess().diagnostic(),
+            ccx.tcx().map.find(fn_node_id),
+            || {
+                format!("while instantiating `{}`, couldn't find it in \
+                     the item map (may have attempted to monomorphize \
+                     an item defined in a different crate?)",
+                    instance)
+            });
+
+        match map_node {
+            hir_map::NodeItem(&hir::Item {
+                ref attrs, node: hir::ItemFn(..), ..
+            }) |
+            hir_map::NodeTraitItem(&hir::TraitItem {
+                ref attrs, node: hir::MethodTraitItem(..), ..
+            }) |
+            hir_map::NodeImplItem(&hir::ImplItem {
+                ref attrs, node: hir::ImplItemKind::Method(..), ..
+            }) => {
+                let lldecl = declare::declare_fn(ccx, symbol_name, mono_ty);
+                llvm::SetLinkage(lldecl, linkage);
+                base::set_link_section(ccx, lldecl, attrs);
+                if linkage == llvm::LinkOnceODRLinkage ||
+                   linkage == llvm::WeakODRLinkage {
+                    llvm::SetUniqueComdat(ccx.llmod(), lldecl);
+                }
+
+                attributes::from_fn_attrs(ccx, attrs, lldecl);
+                ccx.instances().borrow_mut().insert(instance, lldecl);
+            }
+            _ => bug!("Invalid item for TransItem::Fn: `{:?}`", map_node)
+        };
+
+    }
+
+    fn predefine_drop_glue(ccx: &CrateContext<'a, 'tcx>,
+                           dg: glue::DropGlueKind<'tcx>,
+                           linkage: llvm::Linkage,
+                           symbol_name: &str) {
+        let tcx = ccx.tcx();
+        assert_eq!(dg.ty(), glue::get_drop_glue_type(tcx, dg.ty()));
+        let t = dg.ty();
+
+        let sig = ty::FnSig {
+            inputs: vec![tcx.mk_mut_ptr(tcx.types.i8)],
+            output: ty::FnOutput::FnConverging(tcx.mk_nil()),
+            variadic: false,
+        };
+
+        // Create a FnType for fn(*mut i8) and substitute the real type in
+        // later - that prevents FnType from splitting fat pointers up.
+        let mut fn_ty = FnType::new(ccx, Abi::Rust, &sig, &[]);
+        fn_ty.args[0].original_ty = type_of::type_of(ccx, t).ptr_to();
+        let llfnty = fn_ty.llvm_type(ccx);
+
+        assert!(declare::get_defined_value(ccx, symbol_name).is_none());
+        let llfn = declare::declare_cfn(ccx, symbol_name, llfnty);
+        llvm::SetLinkage(llfn, linkage);
+        if linkage == llvm::LinkOnceODRLinkage ||
+           linkage == llvm::WeakODRLinkage {
+            llvm::SetUniqueComdat(ccx.llmod(), llfn);
+        }
+        attributes::set_frame_pointer_elimination(ccx, llfn);
+        ccx.drop_glues().borrow_mut().insert(dg, (llfn, fn_ty));
+    }
+
+    pub fn compute_symbol_name(&self,
+                               scx: &SharedCrateContext<'a, 'tcx>) -> String {
+        match *self {
+            TransItem::Fn(instance) => instance.symbol_name(scx),
+            TransItem::Static(node_id) => {
+                let def_id = scx.tcx().map.local_def_id(node_id);
+                Instance::mono(scx, def_id).symbol_name(scx)
+            }
+            TransItem::DropGlue(dg) => {
+                let prefix = match dg {
+                    DropGlueKind::Ty(_) => "drop",
+                    DropGlueKind::TyContents(_) => "drop_contents",
+                };
+                symbol_names::exported_name_from_type_and_prefix(scx, dg.ty(), prefix)
+            }
+        }
+    }
+
+    pub fn requests_inline(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> bool {
+        match *self {
+            TransItem::Fn(ref instance) => {
+                !instance.substs.types.is_empty() || {
+                    let attributes = tcx.get_attrs(instance.def);
+                    attr::requests_inline(&attributes[..])
+                }
+            }
+            TransItem::DropGlue(..) => true,
+            TransItem::Static(..)   => false,
+        }
+    }
+
+    pub fn is_from_extern_crate(&self) -> bool {
+        match *self {
+            TransItem::Fn(ref instance) => !instance.def.is_local(),
+            TransItem::DropGlue(..) |
+            TransItem::Static(..)   => false,
+        }
+    }
+
+    pub fn is_instantiated_only_on_demand(&self) -> bool {
+        match *self {
+            TransItem::Fn(ref instance) => !instance.def.is_local() ||
+                                           !instance.substs.types.is_empty(),
+            TransItem::DropGlue(..) => true,
+            TransItem::Static(..)   => false,
+        }
+    }
+
+    pub fn is_generic_fn(&self) -> bool {
+        match *self {
+            TransItem::Fn(ref instance) => !instance.substs.types.is_empty(),
+            TransItem::DropGlue(..) |
+            TransItem::Static(..)   => false,
+        }
+    }
+
+    pub fn explicit_linkage(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Option<llvm::Linkage> {
+        let def_id = match *self {
+            TransItem::Fn(ref instance) => instance.def,
+            TransItem::Static(node_id) => tcx.map.local_def_id(node_id),
+            TransItem::DropGlue(..) => return None,
+        };
+
+        let attributes = tcx.get_attrs(def_id);
+        if let Some(name) = attr::first_attr_value_str_by_name(&attributes, "linkage") {
+            if let Some(linkage) = base::llvm_linkage_by_name(&name) {
+                Some(linkage)
+            } else {
+                let span = tcx.map.span_if_local(def_id);
+                if let Some(span) = span {
+                    tcx.sess.span_fatal(span, "invalid linkage specified")
+                } else {
+                    tcx.sess.fatal(&format!("invalid linkage specified: {}", name))
+                }
+            }
+        } else {
+            None
+        }
+    }
+
+    pub fn to_string(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> String {
+        let hir_map = &tcx.map;
+
+        return match *self {
+            TransItem::DropGlue(dg) => {
+                let mut s = String::with_capacity(32);
+                match dg {
+                    DropGlueKind::Ty(_) => s.push_str("drop-glue "),
+                    DropGlueKind::TyContents(_) => s.push_str("drop-glue-contents "),
+                };
+                push_unique_type_name(tcx, dg.ty(), &mut s);
+                s
+            }
+            TransItem::Fn(instance) => {
+                to_string_internal(tcx, "fn ", instance)
+            },
+            TransItem::Static(node_id) => {
+                let def_id = hir_map.local_def_id(node_id);
+                let instance = Instance::new(def_id,
+                                             tcx.mk_substs(subst::Substs::empty()));
+                to_string_internal(tcx, "static ", instance)
+            },
+        };
+
+        fn to_string_internal<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+                                        prefix: &str,
+                                        instance: Instance<'tcx>)
+                                        -> String {
+            let mut result = String::with_capacity(32);
+            result.push_str(prefix);
+            push_instance_as_string(tcx, instance, &mut result);
+            result
+        }
+    }
+
+    pub fn to_raw_string(&self) -> String {
+        match *self {
+            TransItem::DropGlue(dg) => {
+                let prefix = match dg {
+                    DropGlueKind::Ty(_) => "Ty",
+                    DropGlueKind::TyContents(_) => "TyContents",
+                };
+                format!("DropGlue({}: {})", prefix, dg.ty() as *const _ as usize)
+            }
+            TransItem::Fn(instance) => {
+                format!("Fn({:?}, {})",
+                         instance.def,
+                         instance.substs as *const _ as usize)
+            }
+            TransItem::Static(id) => {
+                format!("Static({:?})", id)
+            }
+        }
+    }
+}
+
+
 //=-----------------------------------------------------------------------------
 // TransItem String Keys
 //=-----------------------------------------------------------------------------
@@ -277,108 +595,3 @@ pub fn type_to_string<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
     push_unique_type_name(tcx, ty, &mut output);
     output
 }
-
-impl<'tcx> TransItem<'tcx> {
-
-    pub fn requests_inline<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> bool {
-        match *self {
-            TransItem::Fn(ref instance) => {
-                let attributes = tcx.get_attrs(instance.def);
-                attr::requests_inline(&attributes[..])
-            }
-            TransItem::DropGlue(..) => true,
-            TransItem::Static(..)   => false,
-        }
-    }
-
-    pub fn is_from_extern_crate(&self) -> bool {
-        match *self {
-            TransItem::Fn(ref instance) => !instance.def.is_local(),
-            TransItem::DropGlue(..) |
-            TransItem::Static(..)   => false,
-        }
-    }
-
-    pub fn is_lazily_instantiated(&self) -> bool {
-        match *self {
-            TransItem::Fn(ref instance) => !instance.substs.types.is_empty(),
-            TransItem::DropGlue(..) => true,
-            TransItem::Static(..)   => false,
-        }
-    }
-
-    pub fn explicit_linkage<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Option<llvm::Linkage> {
-        let def_id = match *self {
-            TransItem::Fn(ref instance) => instance.def,
-            TransItem::Static(node_id) => tcx.map.local_def_id(node_id),
-            TransItem::DropGlue(..) => return None,
-        };
-
-        let attributes = tcx.get_attrs(def_id);
-        if let Some(name) = attr::first_attr_value_str_by_name(&attributes, "linkage") {
-            if let Some(linkage) = llvm_linkage_by_name(&name) {
-                Some(linkage)
-            } else {
-                let span = tcx.map.span_if_local(def_id);
-                if let Some(span) = span {
-                    tcx.sess.span_fatal(span, "invalid linkage specified")
-                } else {
-                    tcx.sess.fatal(&format!("invalid linkage specified: {}", name))
-                }
-            }
-        } else {
-            None
-        }
-    }
-
-    pub fn to_string<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> String {
-        let hir_map = &tcx.map;
-
-        return match *self {
-            TransItem::DropGlue(dg) => {
-                let mut s = String::with_capacity(32);
-                match dg {
-                    DropGlueKind::Ty(_) => s.push_str("drop-glue "),
-                    DropGlueKind::TyContents(_) => s.push_str("drop-glue-contents "),
-                };
-                push_unique_type_name(tcx, dg.ty(), &mut s);
-                s
-            }
-            TransItem::Fn(instance) => {
-                to_string_internal(tcx, "fn ", instance)
-            },
-            TransItem::Static(node_id) => {
-                let def_id = hir_map.local_def_id(node_id);
-                let empty_substs = tcx.mk_substs(subst::Substs::empty());
-                let instance = Instance::new(def_id, empty_substs);
-                to_string_internal(tcx, "static ", instance)
-            },
-        };
-
-        fn to_string_internal<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                                        prefix: &str,
-                                        instance: Instance<'tcx>)
-                                        -> String {
-            let mut result = String::with_capacity(32);
-            result.push_str(prefix);
-            push_instance_as_string(tcx, instance, &mut result);
-            result
-        }
-    }
-
-    pub fn to_raw_string(&self) -> String {
-        match *self {
-            TransItem::DropGlue(dg) => {
-                format!("DropGlue({})", dg.ty() as *const _ as usize)
-            }
-            TransItem::Fn(instance) => {
-                format!("Fn({:?}, {})",
-                         instance.def,
-                         instance.substs as *const _ as usize)
-            }
-            TransItem::Static(id) => {
-                format!("Static({:?})", id)
-            }
-        }
-    }
-}
index 088ac1aac1a4016926e9507b8790a43d6e3eb335..9ff30f9ede26295d85f4e94c853d817ce3bf3d84 100644 (file)
@@ -53,7 +53,7 @@
 use rustc_const_eval::EvalHint::UncheckedExprHint;
 use rustc_const_eval::ErrKind::ErroneousReferencedConstant;
 use hir::{self, SelfKind};
-use hir::def::{self, Def};
+use hir::def::{Def, PathResolution};
 use hir::def_id::DefId;
 use hir::print as pprust;
 use middle::resolve_lifetime as rl;
@@ -1327,7 +1327,7 @@ fn associated_path_def_to_ty(&self,
                 };
 
                 if self.ensure_super_predicates(span, trait_did).is_err() {
-                    return (tcx.types.err, ty_path_def);
+                    return (tcx.types.err, Def::Err);
                 }
 
                 let candidates: Vec<ty::PolyTraitRef> =
@@ -1341,7 +1341,7 @@ fn associated_path_def_to_ty(&self,
                                                     &assoc_name.as_str(),
                                                     span) {
                     Ok(bound) => bound,
-                    Err(ErrorReported) => return (tcx.types.err, ty_path_def),
+                    Err(ErrorReported) => return (tcx.types.err, Def::Err),
                 }
             }
             (&ty::TyParam(_), Def::SelfTy(Some(trait_did), None)) => {
@@ -1351,7 +1351,7 @@ fn associated_path_def_to_ty(&self,
                                                      assoc_name,
                                                      span) {
                     Ok(bound) => bound,
-                    Err(ErrorReported) => return (tcx.types.err, ty_path_def),
+                    Err(ErrorReported) => return (tcx.types.err, Def::Err),
                 }
             }
             (&ty::TyParam(_), Def::TyParam(_, _, param_did, param_name)) => {
@@ -1361,7 +1361,7 @@ fn associated_path_def_to_ty(&self,
                                                      assoc_name,
                                                      span) {
                     Ok(bound) => bound,
-                    Err(ErrorReported) => return (tcx.types.err, ty_path_def),
+                    Err(ErrorReported) => return (tcx.types.err, Def::Err),
                 }
             }
             _ => {
@@ -1369,7 +1369,7 @@ fn associated_path_def_to_ty(&self,
                                                       &ty.to_string(),
                                                       "Trait",
                                                       &assoc_name.as_str());
-                return (tcx.types.err, ty_path_def);
+                return (tcx.types.err, Def::Err);
             }
         };
 
@@ -1574,45 +1574,46 @@ fn base_def_to_ty(&self,
         }
     }
 
-    // Note that both base_segments and assoc_segments may be empty, although not at
-    // the same time.
+    // Resolve a possibly-associated type path into a type and a final definition.
+    // Note that both base_segments and assoc_segments may be empty, although not at the same time.
     pub fn finish_resolving_def_to_ty(&self,
                                       rscope: &RegionScope,
                                       span: Span,
                                       param_mode: PathParamMode,
-                                      mut def: Def,
+                                      base_def: Def,
                                       opt_self_ty: Option<Ty<'tcx>>,
                                       base_path_ref_id: ast::NodeId,
                                       base_segments: &[hir::PathSegment],
                                       assoc_segments: &[hir::PathSegment])
                                       -> (Ty<'tcx>, Def) {
-        debug!("finish_resolving_def_to_ty(def={:?}, \
+        // Convert the base type.
+        debug!("finish_resolving_def_to_ty(base_def={:?}, \
                 base_segments={:?}, \
                 assoc_segments={:?})",
-               def,
+               base_def,
                base_segments,
                assoc_segments);
-        let mut ty = self.base_def_to_ty(rscope,
-                                         span,
-                                         param_mode,
-                                         def,
-                                         opt_self_ty,
-                                         base_path_ref_id,
-                                         base_segments);
-        debug!("finish_resolving_def_to_ty: base_def_to_ty returned {:?}", ty);
+        let base_ty = self.base_def_to_ty(rscope,
+                                          span,
+                                          param_mode,
+                                          base_def,
+                                          opt_self_ty,
+                                          base_path_ref_id,
+                                          base_segments);
+        debug!("finish_resolving_def_to_ty: base_def_to_ty returned {:?}", base_ty);
+
         // If any associated type segments remain, attempt to resolve them.
+        let (mut ty, mut def) = (base_ty, base_def);
         for segment in assoc_segments {
             debug!("finish_resolving_def_to_ty: segment={:?}", segment);
-            if ty.sty == ty::TyError {
+            // This is pretty bad (it will fail except for T::A and Self::A).
+            let (new_ty, new_def) = self.associated_path_def_to_ty(span, ty, def, segment);
+            ty = new_ty;
+            def = new_def;
+
+            if def == Def::Err {
                 break;
             }
-            // This is pretty bad (it will fail except for T::A and Self::A).
-            let (a_ty, a_def) = self.associated_path_def_to_ty(span,
-                                                               ty,
-                                                               def,
-                                                               segment);
-            ty = a_ty;
-            def = a_def;
         }
         (ty, def)
     }
@@ -1719,23 +1720,22 @@ pub fn ast_ty_to_ty(&self, rscope: &RegionScope, ast_ty: &hir::Ty) -> Ty<'tcx> {
             hir::TyPath(ref maybe_qself, ref path) => {
                 debug!("ast_ty_to_ty: maybe_qself={:?} path={:?}", maybe_qself, path);
                 let path_res = tcx.expect_resolution(ast_ty.id);
-                let def = path_res.base_def;
                 let base_ty_end = path.segments.len() - path_res.depth;
                 let opt_self_ty = maybe_qself.as_ref().map(|qself| {
                     self.ast_ty_to_ty(rscope, &qself.ty)
                 });
-                let (ty, _def) = self.finish_resolving_def_to_ty(rscope,
-                                                                 ast_ty.span,
-                                                                 PathParamMode::Explicit,
-                                                                 def,
-                                                                 opt_self_ty,
-                                                                 ast_ty.id,
-                                                                 &path.segments[..base_ty_end],
-                                                                 &path.segments[base_ty_end..]);
-
-                if path_res.depth != 0 && ty.sty != ty::TyError {
-                    // Write back the new resolution.
-                    tcx.def_map.borrow_mut().insert(ast_ty.id, def::PathResolution::new(def));
+                let (ty, def) = self.finish_resolving_def_to_ty(rscope,
+                                                                ast_ty.span,
+                                                                PathParamMode::Explicit,
+                                                                path_res.base_def,
+                                                                opt_self_ty,
+                                                                ast_ty.id,
+                                                                &path.segments[..base_ty_end],
+                                                                &path.segments[base_ty_end..]);
+
+                // Write back the new resolution.
+                if path_res.depth != 0 {
+                    tcx.def_map.borrow_mut().insert(ast_ty.id, PathResolution::new(def));
                 }
 
                 ty
index 069a09183a738e7fd8cb6cd26fa871f1bbb6ef43..e90b32cd5dfc0192c5001b46a8dc2795acab7f39 100644 (file)
 
 use hir::def::Def;
 use rustc::infer::{self, InferOk, TypeOrigin};
-use hir::pat_util::{EnumerateAndAdjustIterator, pat_is_resolved_const};
+use hir::pat_util::EnumerateAndAdjustIterator;
 use rustc::ty::subst::Substs;
-use rustc::ty::{self, Ty, TypeFoldable, LvaluePreference};
+use rustc::ty::{self, Ty, TypeFoldable, LvaluePreference, VariantKind};
 use check::{FnCtxt, Expectation};
 use lint;
 use util::nodemap::FnvHashMap;
-use session::Session;
 
 use std::collections::hash_map::Entry::{Occupied, Vacant};
 use std::cmp;
 use rustc::hir::{self, PatKind};
 use rustc::hir::print as pprust;
 
-// This function exists due to the warning "diagnostic code E0164 already used"
-fn bad_struct_kind_err(sess: &Session, pat: &hir::Pat, path: &hir::Path, lint: bool) {
-    let name = pprust::path_to_string(path);
-    let msg = format!("`{}` does not name a tuple variant or a tuple struct", name);
-    if lint {
-        sess.add_lint(lint::builtin::MATCH_OF_UNIT_VARIANT_VIA_PAREN_DOTDOT,
-                      pat.id,
-                      pat.span,
-                      msg);
-    } else {
-        span_err!(sess, pat.span, E0164, "{}", msg);
-    }
-}
-
 impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
     pub fn check_pat(&self, pat: &'gcx hir::Pat, expected: Ty<'tcx>) {
         let tcx = self.tcx;
@@ -136,22 +121,6 @@ pub fn check_pat(&self, pat: &'gcx hir::Pat, expected: Ty<'tcx>) {
                 // subtyping doesn't matter here, as the value is some kind of scalar
                 self.demand_eqtype(pat.span, expected, lhs_ty);
             }
-            PatKind::Path(..) if pat_is_resolved_const(&tcx.def_map.borrow(), pat) => {
-                let const_did = tcx.expect_def(pat.id).def_id();
-                let const_scheme = tcx.lookup_item_type(const_did);
-                assert!(const_scheme.generics.is_empty());
-                let const_ty = self.instantiate_type_scheme(pat.span,
-                                                            &Substs::empty(),
-                                                            &const_scheme.ty);
-                self.write_ty(pat.id, const_ty);
-
-                // FIXME(#20489) -- we should limit the types here to scalars or something!
-
-                // As with PatKind::Lit, what we really want here is that there
-                // exist a LUB, but for the cases that can occur, subtype
-                // is good enough.
-                self.demand_suptype(pat.span, expected, const_ty);
-            }
             PatKind::Binding(bm, _, ref sub) => {
                 let typ = self.local_ty(pat.span, pat.id);
                 match bm {
@@ -197,33 +166,11 @@ pub fn check_pat(&self, pat: &'gcx hir::Pat, expected: Ty<'tcx>) {
                 }
             }
             PatKind::TupleStruct(ref path, ref subpats, ddpos) => {
-                self.check_pat_enum(pat, path, &subpats, ddpos, expected, true);
-            }
-            PatKind::Path(ref path) => {
-                self.check_pat_enum(pat, path, &[], None, expected, false);
+                self.check_pat_tuple_struct(pat, path, &subpats, ddpos, expected);
             }
-            PatKind::QPath(ref qself, ref path) => {
-                let self_ty = self.to_ty(&qself.ty);
-                let path_res = tcx.expect_resolution(pat.id);
-                if path_res.base_def == Def::Err {
-                    self.set_tainted_by_errors();
-                    self.write_error(pat.id);
-                    return;
-                }
-                if let Some((opt_ty, segments, def)) =
-                        self.resolve_ty_and_def_ufcs(path_res, Some(self_ty),
-                                                     path, pat.span, pat.id) {
-                    if self.check_assoc_item_is_const(def, pat.span) {
-                        let scheme = tcx.lookup_item_type(def.def_id());
-                        let predicates = tcx.lookup_predicates(def.def_id());
-                        self.instantiate_path(segments, scheme, &predicates,
-                                              opt_ty, def, pat.span, pat.id);
-                        let const_ty = self.node_ty(pat.id);
-                        self.demand_suptype(pat.span, expected, const_ty);
-                    } else {
-                        self.write_error(pat.id)
-                    }
-                }
+            PatKind::Path(ref opt_qself, ref path) => {
+                let opt_qself_ty = opt_qself.as_ref().map(|qself| self.to_ty(&qself.ty));
+                self.check_pat_path(pat, opt_qself_ty, path, expected);
             }
             PatKind::Struct(ref path, ref fields, etc) => {
                 self.check_pat_struct(pat, path, fields, etc, expected);
@@ -403,20 +350,6 @@ pub fn check_pat(&self, pat: &'gcx hir::Pat, expected: Ty<'tcx>) {
         // subtyping.
     }
 
-    fn check_assoc_item_is_const(&self, def: Def, span: Span) -> bool {
-        match def {
-            Def::AssociatedConst(..) => true,
-            Def::Method(..) => {
-                span_err!(self.tcx.sess, span, E0327,
-                          "associated items in match patterns must be constants");
-                false
-            }
-            _ => {
-                span_bug!(span, "non-associated item in check_assoc_item_is_const");
-            }
-        }
-    }
-
     pub fn check_dereferencable(&self, span: Span, expected: Ty<'tcx>, inner: &hir::Pat) -> bool {
         if let PatKind::Binding(..) = inner.node {
             if let Some(mt) = self.shallow_resolve(expected).builtin_deref(true, ty::NoPreference) {
@@ -554,167 +487,166 @@ pub fn check_match(&self,
 }
 
 impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
-    pub fn check_pat_struct(&self, pat: &'gcx hir::Pat,
-                            path: &hir::Path, fields: &'gcx [Spanned<hir::FieldPat>],
-                            etc: bool, expected: Ty<'tcx>) {
-        let tcx = self.tcx;
-
-        let def = tcx.expect_def(pat.id);
-        let variant = match self.def_struct_variant(def, path.span) {
-            Some((_, variant)) => variant,
-            None => {
-                let name = pprust::path_to_string(path);
-                span_err!(tcx.sess, pat.span, E0163,
-                          "`{}` does not name a struct or a struct variant", name);
-                self.write_error(pat.id);
-
-                for field in fields {
-                    self.check_pat(&field.node.pat, tcx.types.err);
-                }
-                return;
+    fn check_pat_struct(&self,
+                        pat: &'gcx hir::Pat,
+                        path: &hir::Path,
+                        fields: &'gcx [Spanned<hir::FieldPat>],
+                        etc: bool,
+                        expected: Ty<'tcx>)
+    {
+        // Resolve the path and check the definition for errors.
+        let (variant, pat_ty) = if let Some(variant_ty) = self.check_struct_path(path, pat.id,
+                                                                                 pat.span) {
+            variant_ty
+        } else {
+            self.write_error(pat.id);
+            for field in fields {
+                self.check_pat(&field.node.pat, self.tcx.types.err);
             }
+            return;
         };
 
-        let pat_ty = self.instantiate_type(def.def_id(), path);
-        let item_substs = match pat_ty.sty {
+        // Type check the path.
+        self.demand_eqtype(pat.span, expected, pat_ty);
+
+        // Type check subpatterns.
+        let substs = match pat_ty.sty {
             ty::TyStruct(_, substs) | ty::TyEnum(_, substs) => substs,
             _ => span_bug!(pat.span, "struct variant is not an ADT")
         };
-        self.demand_eqtype(pat.span, expected, pat_ty);
-        self.check_struct_pat_fields(pat.span, fields, variant, &item_substs, etc);
-
-        self.write_ty(pat.id, pat_ty);
-        self.write_substs(pat.id, ty::ItemSubsts {
-            substs: item_substs
-        });
+        self.check_struct_pat_fields(pat.span, fields, variant, substs, etc);
     }
 
-    fn check_pat_enum(&self,
+    fn check_pat_path(&self,
                       pat: &hir::Pat,
+                      opt_self_ty: Option<Ty<'tcx>>,
                       path: &hir::Path,
-                      subpats: &'gcx [P<hir::Pat>],
-                      ddpos: Option<usize>,
-                      expected: Ty<'tcx>,
-                      is_tuple_struct_pat: bool)
+                      expected: Ty<'tcx>)
     {
-        // Typecheck the path.
         let tcx = self.tcx;
-
-        let path_res = tcx.expect_resolution(pat.id);
-        if path_res.base_def == Def::Err {
-            self.set_tainted_by_errors();
+        let report_unexpected_def = || {
+            span_err!(tcx.sess, pat.span, E0533,
+                      "`{}` does not name a unit variant, unit struct or a constant",
+                      pprust::path_to_string(path));
             self.write_error(pat.id);
-
-            for pat in subpats {
-                self.check_pat(&pat, tcx.types.err);
-            }
-            return;
-        }
-
-        let (opt_ty, segments, def) = match self.resolve_ty_and_def_ufcs(path_res,
-                                                                         None, path,
-                                                                         pat.span, pat.id) {
-            Some(resolution) => resolution,
-            // Error handling done inside resolve_ty_and_def_ufcs, so if
-            // resolution fails just return.
-            None => {return;}
         };
 
-        // Items that were partially resolved before should have been resolved to
-        // associated constants (i.e. not methods).
-        if path_res.depth != 0 && !self.check_assoc_item_is_const(def, pat.span) {
-            self.write_error(pat.id);
-            return;
+        // Resolve the path and check the definition for errors.
+        let (def, opt_ty, segments) = self.resolve_ty_and_def_ufcs(opt_self_ty, path,
+                                                                   pat.id, pat.span);
+        match def {
+            Def::Err => {
+                self.set_tainted_by_errors();
+                self.write_error(pat.id);
+                return;
+            }
+            Def::Method(..) => {
+                report_unexpected_def();
+                return;
+            }
+            Def::Variant(..) | Def::Struct(..) => {
+                let variant = tcx.expect_variant_def(def);
+                if variant.kind != VariantKind::Unit {
+                    report_unexpected_def();
+                    return;
+                }
+            }
+            Def::Const(..) | Def::AssociatedConst(..) => {} // OK
+            _ => bug!("unexpected pattern definition {:?}", def)
         }
 
-        let enum_def = def.variant_def_ids()
-            .map_or_else(|| def.def_id(), |(enum_def, _)| enum_def);
+        // Type check the path.
+        let scheme = tcx.lookup_item_type(def.def_id());
+        let predicates = tcx.lookup_predicates(def.def_id());
+        let pat_ty = self.instantiate_value_path(segments, scheme, &predicates,
+                                                 opt_ty, def, pat.span, pat.id);
+        self.demand_suptype(pat.span, expected, pat_ty);
+    }
 
-        let ctor_scheme = tcx.lookup_item_type(enum_def);
-        let ctor_predicates = tcx.lookup_predicates(enum_def);
-        let path_scheme = if ctor_scheme.ty.is_fn() {
-            let fn_ret = tcx.no_late_bound_regions(&ctor_scheme.ty.fn_ret()).unwrap();
-            ty::TypeScheme {
-                ty: fn_ret.unwrap(),
-                generics: ctor_scheme.generics,
-            }
-        } else {
-            ctor_scheme
-        };
-        self.instantiate_path(segments, path_scheme, &ctor_predicates,
-                              opt_ty, def, pat.span, pat.id);
-        let report_bad_struct_kind = |is_warning| {
-            bad_struct_kind_err(tcx.sess, pat, path, is_warning);
-            if is_warning { return; }
+    fn check_pat_tuple_struct(&self,
+                              pat: &hir::Pat,
+                              path: &hir::Path,
+                              subpats: &'gcx [P<hir::Pat>],
+                              ddpos: Option<usize>,
+                              expected: Ty<'tcx>)
+    {
+        let tcx = self.tcx;
+        let on_error = || {
             self.write_error(pat.id);
             for pat in subpats {
                 self.check_pat(&pat, tcx.types.err);
             }
         };
-
-        // If we didn't have a fully resolved path to start with, we had an
-        // associated const, and we should quit now, since the rest of this
-        // function uses checks specific to structs and enums.
-        if path_res.depth != 0 {
-            if is_tuple_struct_pat {
-                report_bad_struct_kind(false);
+        let report_unexpected_def = |is_lint| {
+            let msg = format!("`{}` does not name a tuple variant or a tuple struct",
+                              pprust::path_to_string(path));
+            if is_lint {
+                tcx.sess.add_lint(lint::builtin::MATCH_OF_UNIT_VARIANT_VIA_PAREN_DOTDOT,
+                                  pat.id, pat.span, msg);
             } else {
-                let pat_ty = self.node_ty(pat.id);
-                self.demand_suptype(pat.span, expected, pat_ty);
+                span_err!(tcx.sess, pat.span, E0164, "{}", msg);
+                on_error();
             }
-            return;
-        }
-
-        let pat_ty = self.node_ty(pat.id);
-        self.demand_eqtype(pat.span, expected, pat_ty);
+        };
 
-        let real_path_ty = self.node_ty(pat.id);
-        let (kind_name, variant, expected_substs) = match real_path_ty.sty {
-            ty::TyEnum(enum_def, expected_substs) => {
-                let variant = enum_def.variant_of_def(def);
-                ("variant", variant, expected_substs)
-            }
-            ty::TyStruct(struct_def, expected_substs) => {
-                let variant = struct_def.struct_variant();
-                ("struct", variant, expected_substs)
-            }
-            _ => {
-                report_bad_struct_kind(false);
+        // Resolve the path and check the definition for errors.
+        let (def, opt_ty, segments) = self.resolve_ty_and_def_ufcs(None, path, pat.id, pat.span);
+        let variant = match def {
+            Def::Err => {
+                self.set_tainted_by_errors();
+                on_error();
                 return;
             }
-        };
-
-        match (is_tuple_struct_pat, variant.kind()) {
-            (true, ty::VariantKind::Unit) if subpats.is_empty() && ddpos.is_some() => {
-                // Matching unit structs with tuple variant patterns (`UnitVariant(..)`)
-                // is allowed for backward compatibility.
-                report_bad_struct_kind(true);
+            Def::Const(..) | Def::AssociatedConst(..) | Def::Method(..) => {
+                report_unexpected_def(false);
+                return;
             }
-            (true, ty::VariantKind::Unit) |
-            (false, ty::VariantKind::Tuple) |
-            (_, ty::VariantKind::Struct) => {
-                report_bad_struct_kind(false);
-                return
+            Def::Variant(..) | Def::Struct(..) => {
+                tcx.expect_variant_def(def)
             }
-            _ => {}
+            _ => bug!("unexpected pattern definition {:?}", def)
+        };
+        if variant.kind == VariantKind::Unit && subpats.is_empty() && ddpos.is_some() {
+            // Matching unit structs with tuple variant patterns (`UnitVariant(..)`)
+            // is allowed for backward compatibility.
+            report_unexpected_def(true);
+        } else if variant.kind != VariantKind::Tuple {
+            report_unexpected_def(false);
+            return;
         }
 
+        // Type check the path.
+        let scheme = tcx.lookup_item_type(def.def_id());
+        let scheme = if scheme.ty.is_fn() {
+            // Replace constructor type with constructed type for tuple struct patterns.
+            let fn_ret = tcx.no_late_bound_regions(&scheme.ty.fn_ret()).unwrap().unwrap();
+            ty::TypeScheme { ty: fn_ret, generics: scheme.generics }
+        } else {
+            // Leave the type as is for unit structs (backward compatibility).
+            scheme
+        };
+        let predicates = tcx.lookup_predicates(def.def_id());
+        let pat_ty = self.instantiate_value_path(segments, scheme, &predicates,
+                                                 opt_ty, def, pat.span, pat.id);
+        self.demand_eqtype(pat.span, expected, pat_ty);
+
+        // Type check subpatterns.
         if subpats.len() == variant.fields.len() ||
                 subpats.len() < variant.fields.len() && ddpos.is_some() {
+            let substs = match pat_ty.sty {
+                ty::TyStruct(_, substs) | ty::TyEnum(_, substs) => substs,
+                ref ty => bug!("unexpected pattern type {:?}", ty),
+            };
             for (i, subpat) in subpats.iter().enumerate_and_adjust(variant.fields.len(), ddpos) {
-                let field_ty = self.field_ty(subpat.span, &variant.fields[i], expected_substs);
+                let field_ty = self.field_ty(subpat.span, &variant.fields[i], substs);
                 self.check_pat(&subpat, field_ty);
             }
         } else {
             span_err!(tcx.sess, pat.span, E0023,
-                      "this pattern has {} field{}, but the corresponding {} has {} field{}",
-                      subpats.len(), if subpats.len() == 1 {""} else {"s"},
-                      kind_name,
-                      variant.fields.len(), if variant.fields.len() == 1 {""} else {"s"});
-
-            for pat in subpats {
-                self.check_pat(&pat, tcx.types.err);
-            }
+                      "this pattern has {} field{s}, but the corresponding {} has {} field{s}",
+                      subpats.len(), def.kind_name(), variant.fields.len(),
+                      s = if variant.fields.len() == 1 {""} else {"s"});
+            on_error();
         }
     }
 
index 3bc90f05d2536c2f41ebcef9a45dcfc4bc2a5a29..8daa16180a90515883f2b76516569e62503bb7fa 100644 (file)
@@ -84,7 +84,7 @@
 use dep_graph::DepNode;
 use fmt_macros::{Parser, Piece, Position};
 use middle::cstore::LOCAL_CRATE;
-use hir::def::{self, Def};
+use hir::def::{Def, PathResolution};
 use hir::def_id::DefId;
 use hir::pat_util;
 use rustc::infer::{self, InferCtxt, InferOk, TypeOrigin, TypeTrace, type_variable};
@@ -1621,64 +1621,32 @@ fn normalize_associated_type(&self,
     ///
     /// Note that this function is only intended to be used with type-paths,
     /// not with value-paths.
-    pub fn instantiate_type(&self,
-                            did: DefId,
-                            path: &hir::Path)
-                            -> Ty<'tcx>
-    {
-        debug!("instantiate_type(did={:?}, path={:?})", did, path);
-        let type_scheme =
-            self.tcx.lookup_item_type(did);
-        let type_predicates =
-            self.tcx.lookup_predicates(did);
+    pub fn instantiate_type_path(&self,
+                                 did: DefId,
+                                 path: &hir::Path,
+                                 node_id: ast::NodeId)
+                                 -> Ty<'tcx> {
+        debug!("instantiate_type_path(did={:?}, path={:?})", did, path);
+        let type_scheme = self.tcx.lookup_item_type(did);
+        let type_predicates = self.tcx.lookup_predicates(did);
         let substs = AstConv::ast_path_substs_for_ty(self, self,
                                                      path.span,
                                                      PathParamMode::Optional,
                                                      &type_scheme.generics,
                                                      path.segments.last().unwrap());
-        debug!("instantiate_type: ty={:?} substs={:?}", &type_scheme.ty, &substs);
-        let bounds =
-            self.instantiate_bounds(path.span, &substs, &type_predicates);
-        self.add_obligations_for_parameters(
-            traits::ObligationCause::new(
-                path.span,
-                self.body_id,
-                traits::ItemObligation(did)),
-            &bounds);
-
-        self.instantiate_type_scheme(path.span, &substs, &type_scheme.ty)
-    }
-
-    /// Return the dict-like variant corresponding to a given `Def`.
-    pub fn def_struct_variant(&self,
-                              def: Def,
-                              _span: Span)
-                              -> Option<(ty::AdtDef<'tcx>, ty::VariantDef<'tcx>)>
-    {
-        let (adt, variant) = match def {
-            Def::Variant(enum_id, variant_id) => {
-                let adt = self.tcx.lookup_adt_def(enum_id);
-                (adt, adt.variant_with_id(variant_id))
-            }
-            Def::Struct(did) | Def::TyAlias(did) => {
-                let typ = self.tcx.lookup_item_type(did);
-                if let ty::TyStruct(adt, _) = typ.ty.sty {
-                    (adt, adt.struct_variant())
-                } else {
-                    return None;
-                }
-            }
-            _ => return None
-        };
+        let substs = self.tcx.mk_substs(substs);
+        debug!("instantiate_type_path: ty={:?} substs={:?}", &type_scheme.ty, substs);
+        let bounds = self.instantiate_bounds(path.span, substs, &type_predicates);
+        let cause = traits::ObligationCause::new(path.span, self.body_id,
+                                                 traits::ItemObligation(did));
+        self.add_obligations_for_parameters(cause, &bounds);
 
-        let var_kind = variant.kind();
-        if var_kind == ty::VariantKind::Struct {
-            Some((adt, variant))
-        } else if var_kind == ty::VariantKind::Unit {
-             Some((adt, variant))
-         } else {
-             None
-         }
+        let ty_substituted = self.instantiate_type_scheme(path.span, substs, &type_scheme.ty);
+        self.write_ty(node_id, ty_substituted);
+        self.write_substs(node_id, ty::ItemSubsts {
+            substs: substs
+        });
+        ty_substituted
     }
 
     pub fn write_nil(&self, node_id: ast::NodeId) {
@@ -2998,7 +2966,7 @@ fn check_tup_field(&self,
         while let Some((base_t, autoderefs)) = autoderef.next() {
             let field = match base_t.sty {
                 ty::TyStruct(base_def, substs) => {
-                    tuple_like = base_def.struct_variant().is_tuple_struct();
+                    tuple_like = base_def.struct_variant().kind == ty::VariantKind::Tuple;
                     if !tuple_like { continue }
 
                     debug!("tuple struct named {:?}",  base_t);
@@ -3153,35 +3121,57 @@ fn check_struct_fields_on_error(&self,
         }
     }
 
+    pub fn check_struct_path(&self,
+                         path: &hir::Path,
+                         node_id: ast::NodeId,
+                         span: Span)
+                         -> Option<(ty::VariantDef<'tcx>,  Ty<'tcx>)> {
+        let def = self.finish_resolving_struct_path(path, node_id, span);
+        let variant = match def {
+            Def::Err => {
+                self.set_tainted_by_errors();
+                return None;
+            }
+            Def::Variant(..) | Def::Struct(..) => {
+                Some(self.tcx.expect_variant_def(def))
+            }
+            Def::TyAlias(did) | Def::AssociatedTy(_, did) => {
+                if let Some(&ty::TyStruct(adt, _)) = self.tcx.opt_lookup_item_type(did)
+                                                             .map(|scheme| &scheme.ty.sty) {
+                    Some(adt.struct_variant())
+                } else {
+                    None
+                }
+            }
+            _ => None
+        };
+        if variant.is_none() || variant.unwrap().kind == ty::VariantKind::Tuple {
+            // Reject tuple structs for now, braced and unit structs are allowed.
+            span_err!(self.tcx.sess, span, E0071,
+                      "`{}` does not name a struct or a struct variant",
+                      pprust::path_to_string(path));
+            return None;
+        }
+
+        let ty = self.instantiate_type_path(def.def_id(), path, node_id);
+        Some((variant.unwrap(), ty))
+    }
+
     fn check_expr_struct(&self,
                          expr: &hir::Expr,
                          path: &hir::Path,
                          fields: &'gcx [hir::Field],
                          base_expr: &'gcx Option<P<hir::Expr>>)
     {
-        let tcx = self.tcx;
-
         // Find the relevant variant
-        let def = tcx.expect_def(expr.id);
-        if def == Def::Err {
-            self.set_tainted_by_errors();
+        let (variant, expr_ty) = if let Some(variant_ty) = self.check_struct_path(path, expr.id,
+                                                                                  expr.span) {
+            variant_ty
+        } else {
             self.check_struct_fields_on_error(expr.id, fields, base_expr);
             return;
-        }
-        let variant = match self.def_struct_variant(def, path.span) {
-            Some((_, variant)) => variant,
-            None => {
-                span_err!(self.tcx.sess, path.span, E0071,
-                          "`{}` does not name a structure",
-                          pprust::path_to_string(path));
-                self.check_struct_fields_on_error(expr.id, fields, base_expr);
-                return;
-            }
         };
 
-        let expr_ty = self.instantiate_type(def.def_id(), path);
-        self.write_ty(expr.id, expr_ty);
-
         self.check_expr_struct_fields(expr_ty, path.span, variant, fields,
                                       base_expr.is_none());
         if let &Some(ref base_expr) = base_expr {
@@ -3192,13 +3182,13 @@ fn check_expr_struct(&self,
                         expr.id,
                         adt.struct_variant().fields.iter().map(|f| {
                             self.normalize_associated_types_in(
-                                expr.span, &f.ty(tcx, substs)
+                                expr.span, &f.ty(self.tcx, substs)
                             )
                         }).collect()
                     );
                 }
                 _ => {
-                    span_err!(tcx.sess, base_expr.span, E0436,
+                    span_err!(self.tcx.sess, base_expr.span, E0436,
                               "functional record update syntax requires a struct");
                 }
             }
@@ -3349,24 +3339,18 @@ fn check_expr_with_expectation_and_lvalue_pref(&self,
             };
             self.write_ty(id, oprnd_t);
           }
-          hir::ExprPath(ref maybe_qself, ref path) => {
-              let opt_self_ty = maybe_qself.as_ref().map(|qself| {
-                  self.to_ty(&qself.ty)
-              });
-
-              let path_res = tcx.expect_resolution(id);
-              if let Some((opt_ty, segments, def)) =
-                      self.resolve_ty_and_def_ufcs(path_res, opt_self_ty, path,
-                                                   expr.span, expr.id) {
-                  if def != Def::Err {
-                      let (scheme, predicates) = self.type_scheme_and_predicates_for_def(expr.span,
-                                                                                         def);
-                      self.instantiate_path(segments, scheme, &predicates,
-                                            opt_ty, def, expr.span, id);
-                  } else {
-                      self.set_tainted_by_errors();
-                      self.write_ty(id, self.tcx.types.err);
-                  }
+          hir::ExprPath(ref opt_qself, ref path) => {
+              let opt_self_ty = opt_qself.as_ref().map(|qself| self.to_ty(&qself.ty));
+              let (def, opt_ty, segments) = self.resolve_ty_and_def_ufcs(opt_self_ty, path,
+                                                                         expr.id, expr.span);
+              if def != Def::Err {
+                  let (scheme, predicates) = self.type_scheme_and_predicates_for_def(expr.span,
+                                                                                     def);
+                  self.instantiate_value_path(segments, scheme, &predicates,
+                                              opt_ty, def, expr.span, id);
+              } else {
+                  self.set_tainted_by_errors();
+                  self.write_error(id);
               }
 
               // We always require that the type provided as the value for
@@ -3704,37 +3688,67 @@ fn check_expr_with_expectation_and_lvalue_pref(&self,
                expected);
     }
 
+    // Finish resolving a path in a struct expression or pattern `S::A { .. }` if necessary.
+    // The newly resolved definition is written into `def_map`.
+    pub fn finish_resolving_struct_path(&self,
+                                        path: &hir::Path,
+                                        node_id: ast::NodeId,
+                                        span: Span)
+                                        -> Def
+    {
+        let path_res = self.tcx().expect_resolution(node_id);
+        if path_res.depth == 0 {
+            // If fully resolved already, we don't have to do anything.
+            path_res.base_def
+        } else {
+            let base_ty_end = path.segments.len() - path_res.depth;
+            let (_ty, def) = AstConv::finish_resolving_def_to_ty(self, self, span,
+                                                                 PathParamMode::Optional,
+                                                                 path_res.base_def,
+                                                                 None,
+                                                                 node_id,
+                                                                 &path.segments[..base_ty_end],
+                                                                 &path.segments[base_ty_end..]);
+            // Write back the new resolution.
+            self.tcx().def_map.borrow_mut().insert(node_id, PathResolution::new(def));
+            def
+        }
+    }
+
+    // Resolve associated value path into a base type and associated constant or method definition.
+    // The newly resolved definition is written into `def_map`.
     pub fn resolve_ty_and_def_ufcs<'b>(&self,
-                                       path_res: def::PathResolution,
                                        opt_self_ty: Option<Ty<'tcx>>,
                                        path: &'b hir::Path,
-                                       span: Span,
-                                       node_id: ast::NodeId)
-                                       -> Option<(Option<Ty<'tcx>>, &'b [hir::PathSegment], Def)>
+                                       node_id: ast::NodeId,
+                                       span: Span)
+                                       -> (Def, Option<Ty<'tcx>>, &'b [hir::PathSegment])
     {
-
-        // If fully resolved already, we don't have to do anything.
+        let path_res = self.tcx().expect_resolution(node_id);
         if path_res.depth == 0 {
-            Some((opt_self_ty, &path.segments, path_res.base_def))
+            // If fully resolved already, we don't have to do anything.
+            (path_res.base_def, opt_self_ty, &path.segments)
         } else {
-            let def = path_res.base_def;
+            // Try to resolve everything except for the last segment as a type.
             let ty_segments = path.segments.split_last().unwrap().1;
             let base_ty_end = path.segments.len() - path_res.depth;
             let (ty, _def) = AstConv::finish_resolving_def_to_ty(self, self, span,
                                                                  PathParamMode::Optional,
-                                                                 def,
+                                                                 path_res.base_def,
                                                                  opt_self_ty,
                                                                  node_id,
                                                                  &ty_segments[..base_ty_end],
                                                                  &ty_segments[base_ty_end..]);
+
+            // Resolve an associated constant or method on the previously resolved type.
             let item_segment = path.segments.last().unwrap();
             let item_name = item_segment.name;
             let def = match self.resolve_ufcs(span, item_name, ty, node_id) {
-                Ok(def) => Some(def),
+                Ok(def) => def,
                 Err(error) => {
                     let def = match error {
-                        method::MethodError::PrivateMatch(def) => Some(def),
-                        _ => None,
+                        method::MethodError::PrivateMatch(def) => def,
+                        _ => Def::Err,
                     };
                     if item_name != keywords::Invalid.name() {
                         self.report_method_error(span, ty, item_name, None, error);
@@ -3743,14 +3757,9 @@ pub fn resolve_ty_and_def_ufcs<'b>(&self,
                 }
             };
 
-            if let Some(def) = def {
-                // Write back the new resolution.
-                self.tcx().def_map.borrow_mut().insert(node_id, def::PathResolution::new(def));
-                Some((Some(ty), slice::ref_slice(item_segment), def))
-            } else {
-                self.write_error(node_id);
-                None
-            }
+            // Write back the new resolution.
+            self.tcx().def_map.borrow_mut().insert(node_id, PathResolution::new(def));
+            (def, Some(ty), slice::ref_slice(item_segment))
         }
     }
 
@@ -3986,15 +3995,16 @@ fn type_scheme_and_predicates_for_def(&self,
 
     // Instantiates the given path, which must refer to an item with the given
     // number of type parameters and type.
-    pub fn instantiate_path(&self,
-                            segments: &[hir::PathSegment],
-                            type_scheme: TypeScheme<'tcx>,
-                            type_predicates: &ty::GenericPredicates<'tcx>,
-                            opt_self_ty: Option<Ty<'tcx>>,
-                            def: Def,
-                            span: Span,
-                            node_id: ast::NodeId) {
-        debug!("instantiate_path(path={:?}, def={:?}, node_id={}, type_scheme={:?})",
+    pub fn instantiate_value_path(&self,
+                                  segments: &[hir::PathSegment],
+                                  type_scheme: TypeScheme<'tcx>,
+                                  type_predicates: &ty::GenericPredicates<'tcx>,
+                                  opt_self_ty: Option<Ty<'tcx>>,
+                                  def: Def,
+                                  span: Span,
+                                  node_id: ast::NodeId)
+                                  -> Ty<'tcx> {
+        debug!("instantiate_value_path(path={:?}, def={:?}, node_id={}, type_scheme={:?})",
                segments,
                def,
                node_id,
@@ -4023,7 +4033,7 @@ pub fn instantiate_path(&self,
         //    actually pass through this function, but rather the
         //    `ast_ty_to_ty` function in `astconv`. However, in the case
         //    of struct patterns (and maybe literals) we do invoke
-        //    `instantiate_path` to get the general type of an instance of
+        //    `instantiate_value_path` to get the general type of an instance of
         //    a struct. (In these cases, there are actually no type
         //    parameters permitted at present, but perhaps we will allow
         //    them in the future.)
@@ -4246,20 +4256,21 @@ pub fn instantiate_path(&self,
                 }
                 Err(_) => {
                     span_bug!(span,
-                        "instantiate_path: (UFCS) {:?} was a subtype of {:?} but now is not?",
+                        "instantiate_value_path: (UFCS) {:?} was a subtype of {:?} but now is not?",
                         self_ty,
                         impl_ty);
                 }
             }
         }
 
-        debug!("instantiate_path: type of {:?} is {:?}",
+        debug!("instantiate_value_path: type of {:?} is {:?}",
                node_id,
                ty_substituted);
         self.write_ty(node_id, ty_substituted);
         self.write_substs(node_id, ty::ItemSubsts {
             substs: substs
         });
+        ty_substituted
     }
 
     /// Finds the parameters that the user provided and adds them to `substs`. If too many
index 2c33d1a81556eb9f28cae32520627a8aefa6d822..41e7a467fa33a89ff80aa890306ccdaf63d1b511 100644 (file)
@@ -949,7 +949,7 @@ fn convert_variant_ctor<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
                                   scheme: ty::TypeScheme<'tcx>,
                                   predicates: ty::GenericPredicates<'tcx>) {
     let tcx = ccx.tcx;
-    let ctor_ty = match variant.kind() {
+    let ctor_ty = match variant.kind {
         VariantKind::Unit | VariantKind::Struct => scheme.ty,
         VariantKind::Tuple => {
             let inputs: Vec<_> =
@@ -1040,15 +1040,17 @@ fn convert_struct_def<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
                                 def: &hir::VariantData)
                                 -> ty::AdtDefMaster<'tcx>
 {
-
     let did = ccx.tcx.map.local_def_id(it.id);
-    let ctor_id = if !def.is_struct() {
-        ccx.tcx.map.local_def_id(def.id())
-    } else {
-        did
-    };
-    ccx.tcx.intern_adt_def(did, ty::AdtKind::Struct,
-        vec![convert_struct_variant(ccx, ctor_id, it.name, ConstInt::Infer(0), def)])
+    // Use separate constructor id for unit/tuple structs and reuse did for braced structs.
+    let ctor_id = if !def.is_struct() { Some(ccx.tcx.map.local_def_id(def.id())) } else { None };
+    let variants = vec![convert_struct_variant(ccx, ctor_id.unwrap_or(did), it.name,
+                                               ConstInt::Infer(0), def)];
+    let adt = ccx.tcx.intern_adt_def(did, ty::AdtKind::Struct, variants);
+    if let Some(ctor_id) = ctor_id {
+        // Make adt definition available through constructor id as well.
+        ccx.tcx.insert_adt_def(ctor_id, adt);
+    }
+    adt
 }
 
     fn evaluate_disr_expr(ccx: &CrateCtxt, repr_ty: attr::IntType, e: &hir::Expr)
index cdac66a2272379f43011c6ed51162afc3d7a5338..8769bc1a32b5080856297c7f7928c14e8b3eeb32 100644 (file)
@@ -1895,33 +1895,6 @@ fn my_start(argc: isize, argv: *const *const u8) -> isize {
 ```
 "##,
 
-E0163: r##"
-This error means that an attempt was made to match an enum variant as a
-struct type when the variant isn't a struct type:
-
-```compile_fail
-enum Foo { B(u32) }
-
-fn bar(foo: Foo) -> u32 {
-    match foo {
-        B{i} => i, // error E0163
-    }
-}
-```
-
-Try using `()` instead:
-
-```
-enum Foo { B(u32) }
-
-fn bar(foo: Foo) -> u32 {
-    match foo {
-        Foo::B(i) => i,
-    }
-}
-```
-"##,
-
 E0164: r##"
 This error means that an attempt was made to match a struct type enum
 variant as a non-struct type:
@@ -3225,42 +3198,6 @@ impl Foo for Bar {
 ```
 "##,
 
-E0327: r##"
-You cannot use associated items other than constant items as patterns. This
-includes method items. Example of erroneous code:
-
-```compile_fail
-enum B {}
-
-impl B {
-    fn bb() -> i32 { 0 }
-}
-
-fn main() {
-    match 0 {
-        B::bb => {} // error: associated items in match patterns must
-                    // be constants
-    }
-}
-```
-
-Please check that you're not using a method as a pattern. Example:
-
-```
-enum B {
-    ba,
-    bb
-}
-
-fn main() {
-    match B::ba {
-        B::bb => {} // ok!
-        _ => {}
-    }
-}
-```
-"##,
-
 E0329: r##"
 An attempt was made to access an associated constant through either a generic
 type parameter or `Self`. This is not supported yet. An example causing this
@@ -4106,6 +4043,7 @@ fn fly(&self) {} // And now that's ok!
 //  E0129,
 //  E0141,
 //  E0159, // use of trait `{}` as struct constructor
+//  E0163, // merged into E0071
     E0167,
 //  E0168,
 //  E0173, // manual implementations of unboxed closure traits are experimental
@@ -4162,4 +4100,5 @@ fn fly(&self) {} // And now that's ok!
     E0527, // expected {} elements, found {}
     E0528, // expected at least {} elements, found {}
     E0529, // slice pattern expects array or slice, not `{}`
+    E0533, // `{}` does not name a unit variant, unit struct or a constant
 }
index f570375de5ea156ed8828fc75d78880b7031dfb0..a9ea46403f8e016b5138b98140ac5720b2234f33 100644 (file)
@@ -232,8 +232,8 @@ pub fn to_digit(self, radix: u32) -> Option<u32> {
     /// Returns an iterator that yields the hexadecimal Unicode escape of a
     /// character, as `char`s.
     ///
-    /// All characters are escaped with Rust syntax of the form `\\u{NNNN}`
-    /// where `NNNN` is the shortest hexadecimal representation.
+    /// All characters are escaped with Rust syntax of the form `\u{NNNNNN}`
+    /// where `NNNNNN` is the shortest hexadecimal representation.
     ///
     /// # Examples
     ///
index 7827459baa87f310818813f69320e9982cbd0859..7da17b3749104ecff45962ead1407cb111076556 100644 (file)
@@ -1904,7 +1904,7 @@ fn clean(&self, cx: &DocContext) -> Item {
 
 impl<'tcx> Clean<Item> for ty::VariantDefData<'tcx, 'static> {
     fn clean(&self, cx: &DocContext) -> Item {
-        let kind = match self.kind() {
+        let kind = match self.kind {
             ty::VariantKind::Unit => CLikeVariant,
             ty::VariantKind::Tuple => {
                 TupleVariant(
@@ -2578,9 +2578,9 @@ fn name_from_pat(p: &hir::Pat) -> String {
     match p.node {
         PatKind::Wild => "_".to_string(),
         PatKind::Binding(_, ref p, _) => p.node.to_string(),
-        PatKind::TupleStruct(ref p, _, _) | PatKind::Path(ref p) => path_to_string(p),
-        PatKind::QPath(..) => panic!("tried to get argument name from PatKind::QPath, \
-                                which is not allowed in function arguments"),
+        PatKind::TupleStruct(ref p, _, _) | PatKind::Path(None, ref p) => path_to_string(p),
+        PatKind::Path(..) => panic!("tried to get argument name from qualified PatKind::Path, \
+                                     which is not allowed in function arguments"),
         PatKind::Struct(ref name, ref fields, etc) => {
             format!("{} {{ {}{} }}", path_to_string(name),
                 fields.iter().map(|&Spanned { node: ref fp, .. }|
@@ -2653,7 +2653,7 @@ fn resolve_type(cx: &DocContext,
         Def::SelfTy(..) if path.segments.len() == 1 => {
             return Generic(keywords::SelfType.name().to_string());
         }
-        Def::SelfTy(..) | Def::TyParam(..) => true,
+        Def::SelfTy(..) | Def::TyParam(..) | Def::AssociatedTy(..) => true,
         _ => false,
     };
     let did = register_def(&*cx, def);
index 84e98a6739193e9b917135c3b218e00cb15bd08f..096e1ecc9ffb6a33a67b1dceb84a2b8837dd1d26 100644 (file)
@@ -107,7 +107,7 @@ pub enum Class {
 ///
 /// The classifier will call into the `Writer` implementation as it finds spans
 /// of text to highlight. Exactly how that text should be highlighted is up to
-/// the implemention.
+/// the implementation.
 pub trait Writer {
     /// Called when we start processing a span of text that should be highlighted.
     /// The `Class` argument specifies how it should be highlighted.
index 6ab2bcc768590f16a5285188725d06f0c147039d..c263bcb04e9b6fc62b199100b5a65c6a47211598 100644 (file)
@@ -2716,7 +2716,7 @@ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
         let parentlen = cx.current.len() - if it.is_mod() {1} else {0};
 
         // the sidebar is designed to display sibling functions, modules and
-        // other miscellaneous informations. since there are lots of sibling
+        // other miscellaneous information. since there are lots of sibling
         // items (and that causes quadratic growth in large modules),
         // we refactor common parts into a shared JavaScript file per module.
         // still, we don't move everything into JS because we want to preserve
index b45e059e6d5e9d87c671223d20dfa3da090e6327..303cc671f4a230c79181cd8ad91ad8b66992b237 100644 (file)
@@ -572,14 +572,6 @@ a.test-arrow {
     right: 5px;
 }
 
-.methods .section-header {
-    /* Override parent class attributes. */
-    border-bottom: none !important;
-    font-size: 1.1em !important;
-    margin: 0 0 -5px;
-    padding: 0;
-}
-
 .section-header:hover a:after {
     content: '\2002\00a7\2002';
 }
index e142c78569bd7ad597e3b71bfcb6db88df487023..05ae8ed5b0b66be71b491821fa99be19f2534f74 100644 (file)
@@ -214,6 +214,30 @@ pub fn last_os_error() -> Error {
     }
 
     /// Creates a new instance of an `Error` from a particular OS error code.
+    ///
+    /// # Examples
+    ///
+    /// On Linux:
+    ///
+    /// ```
+    /// # if cfg!(target_os = "linux") {
+    /// use std::io;
+    ///
+    /// let error = io::Error::from_raw_os_error(98);
+    /// assert_eq!(error.kind(), io::ErrorKind::AddrInUse);
+    /// # }
+    /// ```
+    ///
+    /// On Windows:
+    ///
+    /// ```
+    /// # if cfg!(windows) {
+    /// use std::io;
+    ///
+    /// let error = io::Error::from_raw_os_error(10048);
+    /// assert_eq!(error.kind(), io::ErrorKind::AddrInUse);
+    /// # }
+    /// ```
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn from_raw_os_error(code: i32) -> Error {
         Error { repr: Repr::Os(code) }
index 07f43f72ff55ab1d987b60c70c37b909695f27a3..c8b52fc046769b9ca59986a6ec5e621dee8b9d6b 100644 (file)
@@ -152,8 +152,8 @@ pub struct Sink { _priv: () }
 /// ```rust
 /// use std::io::{self, Write};
 ///
-/// let mut buffer = vec![1, 2, 3, 5, 8];
-/// let num_bytes = io::sink().write(&mut buffer).unwrap();
+/// let buffer = vec![1, 2, 3, 5, 8];
+/// let num_bytes = io::sink().write(&buffer).unwrap();
 /// assert_eq!(num_bytes, 5);
 /// ```
 #[stable(feature = "rust1", since = "1.0.0")]
index 1d97611eabb2671261826c8abfa2a090dcc2f886..a408b4378e19e6b14cdb0f68478adf628f527dc0 100644 (file)
@@ -239,7 +239,7 @@ pub fn memrchr(x: u8, text: &[u8]) -> Option<usize> {
         text[..offset].iter().rposition(|elt| *elt == x)
     }
 
-    // test fallback implementations on all plattforms
+    // test fallback implementations on all platforms
     #[test]
     fn matches_one() {
         assert_eq!(Some(0), memchr(b'a', b"a"));
index ad4cdef615847719d2f63ff078b8f36346a302e9..2d19561139b58144d12df7dc4c37f838199a1505 100644 (file)
@@ -1529,8 +1529,7 @@ pub fn parent(&self) -> Option<&Path> {
 
     /// The final component of the path, if it is a normal file.
     ///
-    /// If the path terminates in `.`, `..`, or consists solely of a root of
-    /// prefix, `file_name` will return `None`.
+    /// If the path terminates in `..`, `file_name` will return `None`.
     ///
     /// # Examples
     ///
@@ -1543,6 +1542,17 @@ pub fn parent(&self) -> Option<&Path> {
     ///
     /// assert_eq!(Some(os_str), path.file_name());
     /// ```
+    ///
+    /// # Other examples
+    ///
+    /// ```
+    /// use std::path::Path;
+    /// use std::ffi::OsStr;
+    ///
+    /// assert_eq!(Some(OsStr::new("foo.txt")), Path::new("foo.txt/.").file_name());
+    /// assert_eq!(Some(OsStr::new("foo.txt")), Path::new("foo.txt/.//").file_name());
+    /// assert_eq!(None, Path::new("foo.txt/..").file_name());
+    /// ```
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn file_name(&self) -> Option<&OsStr> {
         self.components().next_back().and_then(|p| {
index be9cd6a688858ff2fc6b8da382babd1d0c496892..de891ea89189a26e40fbe9745f8d6e62c60e71ff 100644 (file)
@@ -506,6 +506,9 @@ mod prim_f64 { }
 ///
 /// *[See also the `std::i8` module](i8/index.html).*
 ///
+/// However, please note that examples are shared between primitive integer
+/// types. So it's normal if you see usage of types like `i64` in there.
+///
 mod prim_i8 { }
 
 #[doc(primitive = "i16")]
@@ -514,6 +517,9 @@ mod prim_i8 { }
 ///
 /// *[See also the `std::i16` module](i16/index.html).*
 ///
+/// However, please note that examples are shared between primitive integer
+/// types. So it's normal if you see usage of types like `i32` in there.
+///
 mod prim_i16 { }
 
 #[doc(primitive = "i32")]
@@ -522,6 +528,9 @@ mod prim_i16 { }
 ///
 /// *[See also the `std::i32` module](i32/index.html).*
 ///
+/// However, please note that examples are shared between primitive integer
+/// types. So it's normal if you see usage of types like `i16` in there.
+///
 mod prim_i32 { }
 
 #[doc(primitive = "i64")]
@@ -530,6 +539,9 @@ mod prim_i32 { }
 ///
 /// *[See also the `std::i64` module](i64/index.html).*
 ///
+/// However, please note that examples are shared between primitive integer
+/// types. So it's normal if you see usage of types like `i8` in there.
+///
 mod prim_i64 { }
 
 #[doc(primitive = "u8")]
@@ -538,6 +550,9 @@ mod prim_i64 { }
 ///
 /// *[See also the `std::u8` module](u8/index.html).*
 ///
+/// However, please note that examples are shared between primitive integer
+/// types. So it's normal if you see usage of types like `u64` in there.
+///
 mod prim_u8 { }
 
 #[doc(primitive = "u16")]
@@ -546,6 +561,9 @@ mod prim_u8 { }
 ///
 /// *[See also the `std::u16` module](u16/index.html).*
 ///
+/// However, please note that examples are shared between primitive integer
+/// types. So it's normal if you see usage of types like `u32` in there.
+///
 mod prim_u16 { }
 
 #[doc(primitive = "u32")]
@@ -554,6 +572,9 @@ mod prim_u16 { }
 ///
 /// *[See also the `std::u32` module](u32/index.html).*
 ///
+/// However, please note that examples are shared between primitive integer
+/// types. So it's normal if you see usage of types like `u16` in there.
+///
 mod prim_u32 { }
 
 #[doc(primitive = "u64")]
@@ -562,6 +583,9 @@ mod prim_u32 { }
 ///
 /// *[See also the `std::u64` module](u64/index.html).*
 ///
+/// However, please note that examples are shared between primitive integer
+/// types. So it's normal if you see usage of types like `u8` in there.
+///
 mod prim_u64 { }
 
 #[doc(primitive = "isize")]
@@ -570,6 +594,9 @@ mod prim_u64 { }
 ///
 /// *[See also the `std::isize` module](isize/index.html).*
 ///
+/// However, please note that examples are shared between primitive integer
+/// types. So it's normal if you see usage of types like `usize` in there.
+///
 mod prim_isize { }
 
 #[doc(primitive = "usize")]
@@ -578,4 +605,7 @@ mod prim_isize { }
 ///
 /// *[See also the `std::usize` module](usize/index.html).*
 ///
+/// However, please note that examples are shared between primitive integer
+/// types. So it's normal if you see usage of types like `isize` in there.
+///
 mod prim_usize { }
index 274e495d70eb620b3f0d4e5f2ea91ee618825dfb..8b7af17f92b85b8a0038728827e7c1390abd9c4c 100644 (file)
@@ -152,9 +152,19 @@ pub fn lookup_host(host: &str) -> io::Result<LookupHost> {
     init();
 
     let c_host = CString::new(host)?;
+    let hints = c::addrinfo {
+        ai_flags: 0,
+        ai_family: 0,
+        ai_socktype: c::SOCK_STREAM,
+        ai_protocol: 0,
+        ai_addrlen: 0,
+        ai_addr: ptr::null_mut(),
+        ai_canonname: ptr::null_mut(),
+        ai_next: ptr::null_mut()
+    };
     let mut res = ptr::null_mut();
     unsafe {
-        cvt_gai(c::getaddrinfo(c_host.as_ptr(), ptr::null(), ptr::null(),
+        cvt_gai(c::getaddrinfo(c_host.as_ptr(), ptr::null(), &hints,
                                &mut res))?;
         Ok(LookupHost { original: res, cur: res })
     }
@@ -591,3 +601,22 @@ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
             .finish()
     }
 }
+
+#[cfg(test)]
+mod tests {
+    use prelude::v1::*;
+
+    use super::*;
+    use collections::HashMap;
+
+    #[test]
+    fn no_lookup_host_duplicates() {
+        let mut addrs = HashMap::new();
+        let lh = match lookup_host("localhost") {
+            Ok(lh) => lh,
+            Err(e) => panic!("couldn't resolve `localhost': {}", e)
+        };
+        let _na = lh.map(|sa| *addrs.entry(sa).or_insert(0) += 1).count();
+        assert!(addrs.values().filter(|&&v| v > 1).count() == 0);
+    }
+}
index c670283e559d9b8c60451dc964c617302ccada20..b2b63d0dbb4bd76b4b8fe8ac3121adf460438d67 100644 (file)
@@ -237,7 +237,7 @@ fn mac_result<'a>(path: &ast::Path, ident: Option<Ident>, tts: Vec<TokenTree>, m
                     },
                 });
 
-                let marked_tts = mark_tts(tts, mark);
+                let marked_tts = mark_tts(&tts, mark);
                 Some(expandfun.expand(fld.cx, call_site, &marked_tts))
             }
 
@@ -257,7 +257,7 @@ fn mac_result<'a>(path: &ast::Path, ident: Option<Ident>, tts: Vec<TokenTree>, m
                     }
                 });
 
-                let marked_tts = mark_tts(tts, mark);
+                let marked_tts = mark_tts(&tts, mark);
                 Some(expander.expand(fld.cx, call_site, ident, marked_tts))
             }
 
@@ -769,7 +769,11 @@ fn expand_annotatable(mut item: Annotatable, fld: &mut MacroExpander) -> SmallVe
             };
 
             fld.cx.bt_pop();
-            modified.into_iter().flat_map(|it| expand_annotatable(it, fld)).collect()
+            let configured = modified.into_iter().flat_map(|it| {
+                it.fold_with(&mut fld.strip_unconfigured())
+            }).collect::<SmallVector<_>>();
+
+            configured.into_iter().flat_map(|it| expand_annotatable(it, fld)).collect()
         }
     }
 }
@@ -1126,7 +1130,7 @@ fn fold_mac(&mut self, Spanned {node, span}: ast::Mac) -> ast::Mac {
         Spanned {
             node: Mac_ {
                 path: self.fold_path(node.path),
-                tts: self.fold_tts(node.tts),
+                tts: self.fold_tts(&node.tts),
             },
             span: self.new_span(span),
         }
@@ -1141,7 +1145,7 @@ fn new_span(&mut self, mut span: Span) -> Span {
 }
 
 // apply a given mark to the given token trees. Used prior to expansion of a macro.
-fn mark_tts(tts: Vec<TokenTree>, m: Mrk) -> Vec<TokenTree> {
+fn mark_tts(tts: &[TokenTree], m: Mrk) -> Vec<TokenTree> {
     noop_fold_tts(tts, &mut Marker{mark:m, expn_id: None})
 }
 
index 68527b0797d5b6031da5ca6dcc4a2d9eb2c24341..ffc950d76dd27f43047bb06e54def08e5f265266 100644 (file)
@@ -32,6 +32,7 @@ pub mod rt {
     use ext::base::ExtCtxt;
     use parse::{self, token, classify};
     use ptr::P;
+    use std::rc::Rc;
 
     use tokenstream::{self, TokenTree};
 
@@ -215,12 +216,12 @@ fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> {
             if self.node.style == ast::AttrStyle::Inner {
                 r.push(TokenTree::Token(self.span, token::Not));
             }
-            r.push(TokenTree::Delimited(self.span, tokenstream::Delimited {
+            r.push(TokenTree::Delimited(self.span, Rc::new(tokenstream::Delimited {
                 delim: token::Bracket,
                 open_span: self.span,
                 tts: self.node.value.to_tokens(cx),
                 close_span: self.span,
-            }));
+            })));
             r
         }
     }
@@ -235,12 +236,12 @@ fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> {
 
     impl ToTokens for () {
         fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
-            vec![TokenTree::Delimited(DUMMY_SP, tokenstream::Delimited {
+            vec![TokenTree::Delimited(DUMMY_SP, Rc::new(tokenstream::Delimited {
                 delim: token::Paren,
                 open_span: DUMMY_SP,
                 tts: vec![],
                 close_span: DUMMY_SP,
-            })]
+            }))]
         }
     }
 
@@ -791,9 +792,14 @@ fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, matcher: bool) -> Vec<ast::Stm
                                 id_ext("tokenstream"),
                                 id_ext("SequenceRepetition")];
             let e_seq_struct = cx.expr_struct(sp, cx.path_global(sp, seq_path), fields);
+            let e_rc_new = cx.expr_call_global(sp, vec![id_ext("std"),
+                                                        id_ext("rc"),
+                                                        id_ext("Rc"),
+                                                        id_ext("new")],
+                                                   vec![e_seq_struct]);
             let e_tok = cx.expr_call(sp,
                                      mk_tt_path(cx, sp, "Sequence"),
-                                     vec!(e_sp, e_seq_struct));
+                                     vec!(e_sp, e_rc_new));
             let e_push =
                 cx.expr_method_call(sp,
                                     cx.expr_ident(sp, id_ext("tt")),
index 23f0b1fff0ae72b79c28763d4bfab6befbfc8706..84572b84963f3812511a2efdb4c8622d875a4297 100644 (file)
@@ -28,6 +28,7 @@
 use std::cell::RefCell;
 use std::collections::{HashMap};
 use std::collections::hash_map::{Entry};
+use std::rc::Rc;
 
 struct ParserAnyMacro<'a> {
     parser: RefCell<Parser<'a>>,
@@ -262,7 +263,7 @@ pub fn compile<'cx>(cx: &'cx mut ExtCtxt,
     let match_lhs_tok = MatchNt(lhs_nm, token::str_to_ident("tt"));
     let match_rhs_tok = MatchNt(rhs_nm, token::str_to_ident("tt"));
     let argument_gram = vec![
-        TokenTree::Sequence(DUMMY_SP, tokenstream::SequenceRepetition {
+        TokenTree::Sequence(DUMMY_SP, Rc::new(tokenstream::SequenceRepetition {
             tts: vec![
                 TokenTree::Token(DUMMY_SP, match_lhs_tok),
                 TokenTree::Token(DUMMY_SP, token::FatArrow),
@@ -271,14 +272,14 @@ pub fn compile<'cx>(cx: &'cx mut ExtCtxt,
             separator: Some(token::Semi),
             op: tokenstream::KleeneOp::OneOrMore,
             num_captures: 2,
-        }),
+        })),
         // to phase into semicolon-termination instead of semicolon-separation
-        TokenTree::Sequence(DUMMY_SP, tokenstream::SequenceRepetition {
+        TokenTree::Sequence(DUMMY_SP, Rc::new(tokenstream::SequenceRepetition {
             tts: vec![TokenTree::Token(DUMMY_SP, token::Semi)],
             separator: None,
             op: tokenstream::KleeneOp::ZeroOrMore,
             num_captures: 0
-        }),
+        })),
     ];
 
     // Parse the macro_rules! invocation (`none` is for no interpolations):
index 40944a9a1c2d360bf13c78a9b1a45d742f2fb998..7c0d10669f30e108e6e173510ebbd2a9e8fc601c 100644 (file)
@@ -79,11 +79,11 @@ pub fn new_tt_reader_with_doc_flag(sp_diag: &Handler,
     let mut r = TtReader {
         sp_diag: sp_diag,
         stack: vec!(TtFrame {
-            forest: TokenTree::Sequence(DUMMY_SP, tokenstream::SequenceRepetition {
+            forest: TokenTree::Sequence(DUMMY_SP, Rc::new(tokenstream::SequenceRepetition {
                 tts: src,
                 // doesn't matter. This merely holds the root unzipping.
                 separator: None, op: tokenstream::KleeneOp::ZeroOrMore, num_captures: 0
-            }),
+            })),
             idx: 0,
             dotdotdoted: false,
             sep: None,
index ed6f09eed645f7741951542aaeea87665667266c..ac3d643b185cac725fd7139d14ae4541970a3be6 100644 (file)
@@ -28,6 +28,8 @@
 use util::small_vector::SmallVector;
 use util::move_map::MoveMap;
 
+use std::rc::Rc;
+
 pub trait Folder : Sized {
     // Any additions to this trait should happen in form
     // of a call to a public `noop_*` function that only calls
@@ -222,11 +224,11 @@ fn fold_ty_params(&mut self, tps: P<[TyParam]>) -> P<[TyParam]> {
         noop_fold_ty_params(tps, self)
     }
 
-    fn fold_tt(&mut self, tt: TokenTree) -> TokenTree {
+    fn fold_tt(&mut self, tt: &TokenTree) -> TokenTree {
         noop_fold_tt(tt, self)
     }
 
-    fn fold_tts(&mut self, tts: Vec<TokenTree>) -> Vec<TokenTree> {
+    fn fold_tts(&mut self, tts: &[TokenTree]) -> Vec<TokenTree> {
         noop_fold_tts(tts, self)
     }
 
@@ -501,7 +503,7 @@ pub fn noop_fold_mac<T: Folder>(Spanned {node, span}: Mac, fld: &mut T) -> Mac {
     Spanned {
         node: Mac_ {
             path: fld.fold_path(node.path),
-            tts: fld.fold_tts(node.tts),
+            tts: fld.fold_tts(&node.tts),
         },
         span: fld.new_span(span)
     }
@@ -528,26 +530,32 @@ pub fn noop_fold_arg<T: Folder>(Arg {id, pat, ty}: Arg, fld: &mut T) -> Arg {
     }
 }
 
-pub fn noop_fold_tt<T: Folder>(tt: TokenTree, fld: &mut T) -> TokenTree {
-    match tt {
+pub fn noop_fold_tt<T: Folder>(tt: &TokenTree, fld: &mut T) -> TokenTree {
+    match *tt {
         TokenTree::Token(span, ref tok) =>
             TokenTree::Token(span, fld.fold_token(tok.clone())),
-        TokenTree::Delimited(span, delimed) => TokenTree::Delimited(span, Delimited {
-            delim: delimed.delim,
-            open_span: delimed.open_span,
-            tts: fld.fold_tts(delimed.tts),
-            close_span: delimed.close_span,
-        }),
-        TokenTree::Sequence(span, seq) => TokenTree::Sequence(span, SequenceRepetition {
-            tts: fld.fold_tts(seq.tts),
-            separator: seq.separator.clone().map(|tok| fld.fold_token(tok)),
-            ..seq
-        }),
+        TokenTree::Delimited(span, ref delimed) => {
+            TokenTree::Delimited(span, Rc::new(
+                            Delimited {
+                                delim: delimed.delim,
+                                open_span: delimed.open_span,
+                                tts: fld.fold_tts(&delimed.tts),
+                                close_span: delimed.close_span,
+                            }
+                        ))
+        },
+        TokenTree::Sequence(span, ref seq) =>
+            TokenTree::Sequence(span,
+                       Rc::new(SequenceRepetition {
+                           tts: fld.fold_tts(&seq.tts),
+                           separator: seq.separator.clone().map(|tok| fld.fold_token(tok)),
+                           ..**seq
+                       })),
     }
 }
 
-pub fn noop_fold_tts<T: Folder>(tts: Vec<TokenTree>, fld: &mut T) -> Vec<TokenTree> {
-    tts.move_map(|tt| fld.fold_tt(tt))
+pub fn noop_fold_tts<T: Folder>(tts: &[TokenTree], fld: &mut T) -> Vec<TokenTree> {
+    tts.iter().map(|tt| fld.fold_tt(tt)).collect()
 }
 
 // apply ident folder if it's an ident, apply other folds to interpolated nodes
@@ -605,7 +613,7 @@ pub fn noop_fold_interpolated<T: Folder>(nt: token::Nonterminal, fld: &mut T)
             token::NtIdent(Box::new(Spanned::<Ident>{node: fld.fold_ident(id.node), ..*id})),
         token::NtMeta(meta_item) => token::NtMeta(fld.fold_meta_item(meta_item)),
         token::NtPath(path) => token::NtPath(Box::new(fld.fold_path(*path))),
-        token::NtTT(tt) => token::NtTT(tt.map(|tt| fld.fold_tt(tt))),
+        token::NtTT(tt) => token::NtTT(P(fld.fold_tt(&tt))),
         token::NtArm(arm) => token::NtArm(fld.fold_arm(arm)),
         token::NtImplItem(arm) =>
             token::NtImplItem(arm.map(|arm| fld.fold_impl_item(arm)
index bbcc044d43c6b74b8701924847df75867f0a8ec6..9502bc48a3e110f84e0ddf107a3a3181ec92c06d 100644 (file)
@@ -662,6 +662,7 @@ pub fn integer_lit(s: &str,
 #[cfg(test)]
 mod tests {
     use super::*;
+    use std::rc::Rc;
     use syntax_pos::{Span, BytePos, Pos, NO_EXPANSION};
     use codemap::Spanned;
     use ast::{self, PatKind};
@@ -763,7 +764,7 @@ fn string_to_tts_macro () {
                             )
                             if first_delimed.delim == token::Paren
                             && ident.name.as_str() == "a" => {},
-                            _ => panic!("value 3: {:?}", *first_delimed),
+                            _ => panic!("value 3: {:?}", **first_delimed),
                         }
                         let tts = &second_delimed.tts[..];
                         match (tts.len(), tts.get(0), tts.get(1)) {
@@ -774,10 +775,10 @@ fn string_to_tts_macro () {
                             )
                             if second_delimed.delim == token::Paren
                             && ident.name.as_str() == "a" => {},
-                            _ => panic!("value 4: {:?}", *second_delimed),
+                            _ => panic!("value 4: {:?}", **second_delimed),
                         }
                     },
-                    _ => panic!("value 2: {:?}", *macro_delimed),
+                    _ => panic!("value 2: {:?}", **macro_delimed),
                 }
             },
             _ => panic!("value: {:?}",tts),
@@ -793,7 +794,7 @@ fn string_to_tts_1() {
             TokenTree::Token(sp(3, 4), token::Ident(str_to_ident("a"))),
             TokenTree::Delimited(
                 sp(5, 14),
-                tokenstream::Delimited {
+                Rc::new(tokenstream::Delimited {
                     delim: token::DelimToken::Paren,
                     open_span: sp(5, 6),
                     tts: vec![
@@ -802,10 +803,10 @@ fn string_to_tts_1() {
                         TokenTree::Token(sp(10, 13), token::Ident(str_to_ident("i32"))),
                     ],
                     close_span: sp(13, 14),
-                }),
+                })),
             TokenTree::Delimited(
                 sp(15, 21),
-                tokenstream::Delimited {
+                Rc::new(tokenstream::Delimited {
                     delim: token::DelimToken::Brace,
                     open_span: sp(15, 16),
                     tts: vec![
@@ -813,7 +814,7 @@ fn string_to_tts_1() {
                         TokenTree::Token(sp(18, 19), token::Semi),
                     ],
                     close_span: sp(20, 21),
-                })
+                }))
         ];
 
         assert_eq!(tts, expected);
index a06270bb7727a4ac3c5927c0afdbbe25f55750e8..e4875b7c244fd9cb8f641fee92312b9954a0dc07 100644 (file)
@@ -495,64 +495,6 @@ fn tokens_to_string(tokens: &[TokenType]) -> String {
         }
     }
 
-    /// Check for erroneous `ident { }`; if matches, signal error and
-    /// recover (without consuming any expected input token).  Returns
-    /// true if and only if input was consumed for recovery.
-    pub fn check_for_erroneous_unit_struct_expecting(&mut self,
-                                                     expected: &[token::Token])
-                                                     -> bool {
-        if self.token == token::OpenDelim(token::Brace)
-            && expected.iter().all(|t| *t != token::OpenDelim(token::Brace))
-            && self.look_ahead(1, |t| *t == token::CloseDelim(token::Brace)) {
-            // matched; signal non-fatal error and recover.
-            let span = self.span;
-            self.span_err(span, "unit-like struct construction is written with no trailing `{ }`");
-            self.eat(&token::OpenDelim(token::Brace));
-            self.eat(&token::CloseDelim(token::Brace));
-            true
-        } else {
-            false
-        }
-    }
-
-    /// Commit to parsing a complete expression `e` expected to be
-    /// followed by some token from the set edible + inedible.  Recover
-    /// from anticipated input errors, discarding erroneous characters.
-    pub fn commit_expr(&mut self, e: &Expr, edible: &[token::Token],
-                       inedible: &[token::Token]) -> PResult<'a, ()> {
-        debug!("commit_expr {:?}", e);
-        if let ExprKind::Path(..) = e.node {
-            // might be unit-struct construction; check for recoverableinput error.
-            let expected = edible.iter()
-                .cloned()
-                .chain(inedible.iter().cloned())
-                .collect::<Vec<_>>();
-            self.check_for_erroneous_unit_struct_expecting(&expected[..]);
-        }
-        self.expect_one_of(edible, inedible)
-    }
-
-    pub fn commit_expr_expecting(&mut self, e: &Expr, edible: token::Token) -> PResult<'a, ()> {
-        self.commit_expr(e, &[edible], &[])
-    }
-
-    /// Commit to parsing a complete statement `s`, which expects to be
-    /// followed by some token from the set edible + inedible.  Check
-    /// for recoverable input errors, discarding erroneous characters.
-    pub fn commit_stmt(&mut self, edible: &[token::Token],
-                       inedible: &[token::Token]) -> PResult<'a, ()> {
-        if self.last_token
-               .as_ref()
-               .map_or(false, |t| t.is_ident() || t.is_path()) {
-            let expected = edible.iter()
-                .cloned()
-                .chain(inedible.iter().cloned())
-                .collect::<Vec<_>>();
-            self.check_for_erroneous_unit_struct_expecting(&expected);
-        }
-        self.expect_one_of(edible, inedible)
-    }
-
     /// returns the span of expr, if it was not interpolated or the span of the interpolated token
     fn interpolated_or_expr_span(&self,
                                  expr: PResult<'a, P<Expr>>)
@@ -1247,7 +1189,7 @@ pub fn parse_trait_item(&mut self) -> PResult<'a, TraitItem> {
             let default = if self.check(&token::Eq) {
                 self.bump();
                 let expr = self.parse_expr()?;
-                self.commit_expr_expecting(&expr, token::Semi)?;
+                self.expect(&token::Semi)?;
                 Some(expr)
             } else {
                 self.expect(&token::Semi)?;
@@ -2195,8 +2137,7 @@ fn parse_bottom_expr(&mut self) -> PResult<'a, P<Expr>> {
                 let mut trailing_comma = false;
                 while self.token != token::CloseDelim(token::Paren) {
                     es.push(self.parse_expr()?);
-                    self.commit_expr(&es.last().unwrap(), &[],
-                                     &[token::Comma, token::CloseDelim(token::Paren)])?;
+                    self.expect_one_of(&[], &[token::Comma, token::CloseDelim(token::Paren)])?;
                     if self.check(&token::Comma) {
                         trailing_comma = true;
 
@@ -2407,9 +2348,8 @@ fn parse_bottom_expr(&mut self) -> PResult<'a, P<Expr>> {
                                     }
                                 }
 
-                                match self.commit_expr(&fields.last().unwrap().expr,
-                                                       &[token::Comma],
-                                                       &[token::CloseDelim(token::Brace)]) {
+                                match self.expect_one_of(&[token::Comma],
+                                                         &[token::CloseDelim(token::Brace)]) {
                                     Ok(()) => {}
                                     Err(mut e) => {
                                         e.emit();
@@ -2662,7 +2602,7 @@ fn parse_dot_or_call_expr_with_(&mut self, e0: P<Expr>, lo: BytePos) -> PResult<
                 self.bump();
                 let ix = self.parse_expr()?;
                 hi = self.span.hi;
-                self.commit_expr_expecting(&ix, token::CloseDelim(token::Bracket))?;
+                self.expect(&token::CloseDelim(token::Bracket))?;
                 let index = self.mk_index(e, ix);
                 e = self.mk_expr(lo, hi, index, ThinVec::new())
               }
@@ -2688,12 +2628,13 @@ fn parse_unquoted(&mut self) -> PResult<'a, TokenTree> {
                     )?;
                     let (sep, repeat) = self.parse_sep_and_kleene_op()?;
                     let name_num = macro_parser::count_names(&seq);
-                    return Ok(TokenTree::Sequence(mk_sp(sp.lo, seq_span.hi), SequenceRepetition {
-                        tts: seq,
-                        separator: sep,
-                        op: repeat,
-                        num_captures: name_num
-                    }));
+                    return Ok(TokenTree::Sequence(mk_sp(sp.lo, seq_span.hi),
+                                      Rc::new(SequenceRepetition {
+                                          tts: seq,
+                                          separator: sep,
+                                          op: repeat,
+                                          num_captures: name_num
+                                      })));
                 } else if self.token.is_keyword(keywords::Crate) {
                     self.bump();
                     return Ok(TokenTree::Token(sp, SpecialVarNt(SpecialMacroVar::CrateMacroVar)));
@@ -2848,12 +2789,12 @@ pub fn parse_token_tree(&mut self) -> PResult<'a, TokenTree> {
                     _ => {}
                 }
 
-                Ok(TokenTree::Delimited(span, Delimited {
+                Ok(TokenTree::Delimited(span, Rc::new(Delimited {
                     delim: delim,
                     open_span: open_span,
                     tts: tts,
                     close_span: close_span,
-                }))
+                })))
             },
             _ => {
                 // invariants: the current token is not a left-delimiter,
@@ -3328,8 +3269,7 @@ fn parse_match_expr(&mut self, mut attrs: ThinVec<Attribute>) -> PResult<'a, P<E
         let lo = self.last_span.lo;
         let discriminant = self.parse_expr_res(Restrictions::RESTRICTION_NO_STRUCT_LITERAL,
                                                None)?;
-        if let Err(mut e) = self.commit_expr_expecting(&discriminant,
-                                                       token::OpenDelim(token::Brace)) {
+        if let Err(mut e) = self.expect(&token::OpenDelim(token::Brace)) {
             if self.token == token::Token::Semi {
                 e.span_note(match_span, "did you mean to remove this `match` keyword?");
             }
@@ -3375,7 +3315,7 @@ pub fn parse_arm(&mut self) -> PResult<'a, Arm> {
             && self.token != token::CloseDelim(token::Brace);
 
         if require_comma {
-            self.commit_expr(&expr, &[token::Comma], &[token::CloseDelim(token::Brace)])?;
+            self.expect_one_of(&[token::Comma], &[token::CloseDelim(token::Brace)])?;
         } else {
             self.eat(&token::Comma);
         }
@@ -4117,7 +4057,7 @@ fn parse_block_tail(&mut self, lo: BytePos, s: BlockCheckMode) -> PResult<'a, P<
                 _ => { // all other kinds of statements:
                     let mut hi = span.hi;
                     if classify::stmt_ends_with_semi(&node) {
-                        self.commit_stmt(&[token::Semi], &[])?;
+                        self.expect(&token::Semi)?;
                         hi = self.last_span.hi;
                     }
 
@@ -4195,7 +4135,7 @@ fn handle_expression_like_statement(&mut self,
         if classify::expr_requires_semi_to_be_stmt(&e) {
             // Just check for errors and recover; do not eat semicolon yet.
             if let Err(mut e) =
-                self.commit_stmt(&[], &[token::Semi, token::CloseDelim(token::Brace)])
+                self.expect_one_of(&[], &[token::Semi, token::CloseDelim(token::Brace)])
             {
                 e.emit();
                 self.recover_stmt();
@@ -4862,7 +4802,7 @@ pub fn parse_impl_item(&mut self) -> PResult<'a, ImplItem> {
             let typ = self.parse_ty_sum()?;
             self.expect(&token::Eq)?;
             let expr = self.parse_expr()?;
-            self.commit_expr_expecting(&expr, token::Semi)?;
+            self.expect(&token::Semi)?;
             (name, ast::ImplItemKind::Const(typ, expr))
         } else {
             let (name, inner_attrs, node) = self.parse_impl_method(&vis)?;
@@ -5286,7 +5226,7 @@ fn parse_item_const(&mut self, m: Option<Mutability>) -> PResult<'a, ItemInfo> {
         let ty = self.parse_ty_sum()?;
         self.expect(&token::Eq)?;
         let e = self.parse_expr()?;
-        self.commit_expr_expecting(&e, token::Semi)?;
+        self.expect(&token::Semi)?;
         let item = match m {
             Some(m) => ItemKind::Static(ty, m, e),
             None => ItemKind::Const(ty, e),
index 35377d14bab7cc0fe4618fb7be541311b1a987c9..0ad09fd0f7dfba492e8146a64a17ed0a25eb1f69 100644 (file)
@@ -8,18 +8,36 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-//! # Token Trees
-//! TokenTrees are syntactic forms for dealing with tokens. The description below is
-//! more complete; in short a TokenTree is a single token, a delimited sequence of token
-//! trees, or a sequence with repetition for list splicing as part of macro expansion.
+//! # Token Streams
+//!
+//! TokenStreams represent syntactic objects before they are converted into ASTs.
+//! A `TokenStream` is, roughly speaking, a sequence (eg stream) of `TokenTree`s,
+//! which are themselves either a single Token, a Delimited subsequence of tokens,
+//! or a SequenceRepetition specifier (for the purpose of sequence generation during macro
+//! expansion).
+//!
+//! A TokenStream also has a slice view, `TokenSlice`, that is analogous to `str` for
+//! `String`: it allows the programmer to divvy up, explore, and otherwise partition a
+//! TokenStream as borrowed subsequences.
 
-use ast::{AttrStyle};
-use codemap::{Span};
+use ast::{self, AttrStyle, LitKind};
+use syntax_pos::{Span, DUMMY_SP, NO_EXPANSION};
+use codemap::Spanned;
 use ext::base;
 use ext::tt::macro_parser;
 use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
 use parse::lexer;
-use parse::token;
+use parse;
+use parse::token::{self, Token, Lit, InternedString, Nonterminal};
+use parse::token::Lit as TokLit;
+
+use std::fmt;
+use std::mem;
+use std::ops::Index;
+use std::ops;
+use std::iter::*;
+
+use std::rc::Rc;
 
 /// A delimited sequence of token trees
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
@@ -54,6 +72,11 @@ pub fn open_tt(&self) -> TokenTree {
     pub fn close_tt(&self) -> TokenTree {
         TokenTree::Token(self.close_span, self.close_token())
     }
+
+    /// Returns the token trees inside the delimiters.
+    pub fn subtrees(&self) -> &[TokenTree] {
+        &self.tts
+    }
 }
 
 /// A sequence of token trees
@@ -89,18 +112,16 @@ pub enum KleeneOp {
 ///
 /// The RHS of an MBE macro is the only place `SubstNt`s are substituted.
 /// Nothing special happens to misnamed or misplaced `SubstNt`s.
-#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
+#[derive(Debug, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)]
 pub enum TokenTree {
     /// A single token
     Token(Span, token::Token),
     /// A delimited sequence of token trees
-    Delimited(Span, Delimited),
+    Delimited(Span, Rc<Delimited>),
 
     // This only makes sense in MBE macros.
-
     /// A kleene-style repetition sequence with a span
-    // FIXME(eddyb) #12938 Use DST.
-    Sequence(Span, SequenceRepetition),
+    Sequence(Span, Rc<SequenceRepetition>),
 }
 
 impl TokenTree {
@@ -109,28 +130,22 @@ pub fn len(&self) -> usize {
             TokenTree::Token(_, token::DocComment(name)) => {
                 match doc_comment_style(&name.as_str()) {
                     AttrStyle::Outer => 2,
-                    AttrStyle::Inner => 3
+                    AttrStyle::Inner => 3,
                 }
             }
             TokenTree::Token(_, token::SpecialVarNt(..)) => 2,
             TokenTree::Token(_, token::MatchNt(..)) => 3,
-            TokenTree::Delimited(_, ref delimed) => {
-                delimed.tts.len() + 2
-            }
-            TokenTree::Sequence(_, ref seq) => {
-                seq.tts.len()
-            }
-            TokenTree::Token(..) => 0
+            TokenTree::Delimited(_, ref delimed) => delimed.tts.len() + 2,
+            TokenTree::Sequence(_, ref seq) => seq.tts.len(),
+            TokenTree::Token(..) => 0,
         }
     }
 
     pub fn get_tt(&self, index: usize) -> TokenTree {
         match (self, index) {
-            (&TokenTree::Token(sp, token::DocComment(_)), 0) => {
-                TokenTree::Token(sp, token::Pound)
-            }
+            (&TokenTree::Token(sp, token::DocComment(_)), 0) => TokenTree::Token(sp, token::Pound),
             (&TokenTree::Token(sp, token::DocComment(name)), 1)
-            if doc_comment_style(&name.as_str()) == AttrStyle::Inner => {
+                if doc_comment_style(&name.as_str()) == AttrStyle::Inner => {
                 TokenTree::Token(sp, token::Not)
             }
             (&TokenTree::Token(sp, token::DocComment(name)), _) => {
@@ -138,18 +153,21 @@ pub fn get_tt(&self, index: usize) -> TokenTree {
 
                 // Searches for the occurrences of `"#*` and returns the minimum number of `#`s
                 // required to wrap the text.
-                let num_of_hashes = stripped.chars().scan(0, |cnt, x| {
-                    *cnt = if x == '"' {
-                        1
-                    } else if *cnt != 0 && x == '#' {
-                        *cnt + 1
-                    } else {
-                        0
-                    };
-                    Some(*cnt)
-                }).max().unwrap_or(0);
+                let num_of_hashes = stripped.chars()
+                    .scan(0, |cnt, x| {
+                        *cnt = if x == '"' {
+                            1
+                        } else if *cnt != 0 && x == '#' {
+                            *cnt + 1
+                        } else {
+                            0
+                        };
+                        Some(*cnt)
+                    })
+                    .max()
+                    .unwrap_or(0);
 
-                TokenTree::Delimited(sp, Delimited {
+                TokenTree::Delimited(sp, Rc::new(Delimited {
                     delim: token::Bracket,
                     open_span: sp,
                     tts: vec![TokenTree::Token(sp, token::Ident(token::str_to_ident("doc"))),
@@ -157,7 +175,7 @@ pub fn get_tt(&self, index: usize) -> TokenTree {
                               TokenTree::Token(sp, token::Literal(
                                   token::StrRaw(token::intern(&stripped), num_of_hashes), None))],
                     close_span: sp,
-                })
+                }))
             }
             (&TokenTree::Delimited(_, ref delimed), _) => {
                 if index == 0 {
@@ -179,24 +197,24 @@ pub fn get_tt(&self, index: usize) -> TokenTree {
                          TokenTree::Token(sp, token::Ident(kind))];
                 v[index].clone()
             }
-            (&TokenTree::Sequence(_, ref seq), _) => {
-                seq.tts[index].clone()
-            }
-            _ => panic!("Cannot expand a token tree")
+            (&TokenTree::Sequence(_, ref seq), _) => seq.tts[index].clone(),
+            _ => panic!("Cannot expand a token tree"),
         }
     }
 
     /// Returns the `Span` corresponding to this token tree.
     pub fn get_span(&self) -> Span {
         match *self {
-            TokenTree::Token(span, _)     => span,
+            TokenTree::Token(span, _) => span,
             TokenTree::Delimited(span, _) => span,
-            TokenTree::Sequence(span, _)  => span,
+            TokenTree::Sequence(span, _) => span,
         }
     }
 
     /// Use this token tree as a matcher to parse given tts.
-    pub fn parse(cx: &base::ExtCtxt, mtch: &[TokenTree], tts: &[TokenTree])
+    pub fn parse(cx: &base::ExtCtxt,
+                 mtch: &[TokenTree],
+                 tts: &[TokenTree])
                  -> macro_parser::NamedParseResult {
         // `None` is because we're not interpolating
         let arg_rdr = lexer::new_tt_reader_with_doc_flag(&cx.parse_sess().span_diagnostic,
@@ -206,5 +224,1071 @@ pub fn parse(cx: &base::ExtCtxt, mtch: &[TokenTree], tts: &[TokenTree])
                                                          true);
         macro_parser::parse(cx.parse_sess(), cx.cfg(), arg_rdr, mtch)
     }
+
+    /// Check if this TokenTree is equal to the other, regardless of span information.
+    pub fn eq_unspanned(&self, other: &TokenTree) -> bool {
+        match (self, other) {
+            (&TokenTree::Token(_, ref tk), &TokenTree::Token(_, ref tk2)) => tk == tk2,
+            (&TokenTree::Delimited(_, ref dl), &TokenTree::Delimited(_, ref dl2)) => {
+                (*dl).delim == (*dl2).delim && dl.tts.len() == dl2.tts.len() &&
+                {
+                    for (tt1, tt2) in dl.tts.iter().zip(dl2.tts.iter()) {
+                        if !tt1.eq_unspanned(tt2) {
+                            return false;
+                        }
+                    }
+                    true
+                }
+            }
+            (_, _) => false,
+        }
+    }
+
+    /// Retrieve the TokenTree's span.
+    pub fn span(&self) -> Span {
+        match *self {
+            TokenTree::Token(sp, _) |
+            TokenTree::Delimited(sp, _) |
+            TokenTree::Sequence(sp, _) => sp,
+        }
+    }
+
+    /// Indicates if the stream is a token that is equal to the provided token.
+    pub fn eq_token(&self, t: Token) -> bool {
+        match *self {
+            TokenTree::Token(_, ref tk) => *tk == t,
+            _ => false,
+        }
+    }
+
+    /// Indicates if the token is an identifier.
+    pub fn is_ident(&self) -> bool {
+        self.maybe_ident().is_some()
+    }
+
+    /// Returns an identifier.
+    pub fn maybe_ident(&self) -> Option<ast::Ident> {
+        match *self {
+            TokenTree::Token(_, Token::Ident(t)) => Some(t.clone()),
+            TokenTree::Delimited(_, ref dl) => {
+                let tts = dl.subtrees();
+                if tts.len() != 1 {
+                    return None;
+                }
+                tts[0].maybe_ident()
+            }
+            _ => None,
+        }
+    }
+
+    /// Returns a Token literal.
+    pub fn maybe_lit(&self) -> Option<token::Lit> {
+        match *self {
+            TokenTree::Token(_, Token::Literal(l, _)) => Some(l.clone()),
+            TokenTree::Delimited(_, ref dl) => {
+                let tts = dl.subtrees();
+                if tts.len() != 1 {
+                    return None;
+                }
+                tts[0].maybe_lit()
+            }
+            _ => None,
+        }
+    }
+
+    /// Returns an AST string literal.
+    pub fn maybe_str(&self) -> Option<ast::Lit> {
+        match *self {
+            TokenTree::Token(sp, Token::Literal(Lit::Str_(s), _)) => {
+                let l = LitKind::Str(token::intern_and_get_ident(&parse::str_lit(&s.as_str())),
+                                     ast::StrStyle::Cooked);
+                Some(Spanned {
+                    node: l,
+                    span: sp,
+                })
+            }
+            TokenTree::Token(sp, Token::Literal(Lit::StrRaw(s, n), _)) => {
+                let l = LitKind::Str(token::intern_and_get_ident(&parse::raw_str_lit(&s.as_str())),
+                                     ast::StrStyle::Raw(n));
+                Some(Spanned {
+                    node: l,
+                    span: sp,
+                })
+            }
+            _ => None,
+        }
+    }
+}
+
+/// #Token Streams
+///
+/// TokenStreams are a syntactic abstraction over TokenTrees. The goal is for procedural
+/// macros to work over TokenStreams instead of arbitrary syntax. For now, however, we
+/// are going to cut a few corners (i.e., use some of the AST structure) when we need to
+/// for backwards compatibility.
+
+/// TokenStreams are collections of TokenTrees that represent a syntactic structure. The
+/// struct itself shouldn't be directly manipulated; the internal structure is not stable,
+/// and may be changed at any time in the future. The operators will not, however (except
+/// for signatures, later on).
+#[derive(Eq,Clone,Hash,RustcEncodable,RustcDecodable)]
+pub struct TokenStream {
+    pub span: Span,
+    pub tts: Vec<TokenTree>,
+}
+
+impl fmt::Debug for TokenStream {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        if self.tts.len() == 0 {
+            write!(f, "([empty")?;
+        } else {
+            write!(f, "([")?;
+            write!(f, "{:?}", self.tts[0])?;
+
+            for tt in self.tts.iter().skip(1) {
+                write!(f, ",{:?}", tt)?;
+            }
+        }
+        write!(f, "|")?;
+        self.span.fmt(f)?;
+        write!(f, "])")
+    }
+}
+
+/// Checks if two TokenStreams are equivalent (including spans). For unspanned
+/// equality, see `eq_unspanned`.
+impl PartialEq<TokenStream> for TokenStream {
+    fn eq(&self, other: &TokenStream) -> bool {
+        self.tts == other.tts
+    }
+}
+
+// NB this will disregard gaps. if we have [a|{2,5} , b|{11,13}], the resultant span
+// will be at {2,13}. Without finer-grained span structures, however, this seems to be
+// our only recourse.
+// FIXME Do something smarter to compute the expansion id.
+fn covering_span(trees: &[TokenTree]) -> Span {
+    // disregard any dummy spans we have
+    let trees = trees.iter().filter(|t| t.span() != DUMMY_SP).collect::<Vec<&TokenTree>>();
+
+    // if we're out of spans, stop
+    if trees.len() < 1 {
+        return DUMMY_SP;
+    }
+
+    // set up the initial values
+    let fst_span = trees[0].span();
+
+    let mut lo_span = fst_span.lo;
+    let mut hi_span = fst_span.hi;
+    let mut expn_id = fst_span.expn_id;
+
+    // compute the spans iteratively
+    for t in trees.iter().skip(1) {
+        let sp = t.span();
+        if sp.lo < lo_span {
+            lo_span = sp.lo;
+        }
+        if hi_span < sp.hi {
+            hi_span = sp.hi;
+        }
+        if expn_id != sp.expn_id {
+            expn_id = NO_EXPANSION;
+        }
+    }
+
+    Span {
+        lo: lo_span,
+        hi: hi_span,
+        expn_id: expn_id,
+    }
 }
 
+/// TokenStream operators include basic destructuring, boolean operations, `maybe_...`
+/// operations, and `maybe_..._prefix` operations. Boolean operations are straightforward,
+/// indicating information about the structure of the stream. The `maybe_...` operations
+/// return `Some<...>` if the tokenstream contains the appropriate item.
+///
+/// Similarly, the `maybe_..._prefix` operations potentially return a
+/// partially-destructured stream as a pair where the first element is the expected item
+/// and the second is the remainder of the stream. As anb example,
+///
+///    `maybe_path_prefix("a::b::c(a,b,c).foo()") -> (a::b::c, "(a,b,c).foo()")`
+impl TokenStream {
+    /// Convert a vector of `TokenTree`s into a `TokenStream`.
+    pub fn from_tts(trees: Vec<TokenTree>) -> TokenStream {
+        let span = covering_span(&trees);
+        TokenStream {
+            tts: trees,
+            span: span,
+        }
+    }
+
+    /// Copies all of the TokenTrees from the TokenSlice, appending them to the stream.
+    pub fn append_stream(mut self, ts2: &TokenSlice) {
+        for tt in ts2.iter() {
+            self.tts.push(tt.clone());
+        }
+        self.span = covering_span(&self.tts[..]);
+    }
+
+    /// Manually change a TokenStream's span.
+    pub fn respan(self, span: Span) -> TokenStream {
+        TokenStream {
+            tts: self.tts,
+            span: span,
+        }
+    }
+
+    /// Construct a TokenStream from an ast literal.
+    pub fn from_ast_lit_str(lit: ast::Lit) -> Option<TokenStream> {
+        match lit.node {
+            LitKind::Str(val, _) => {
+                let val = TokLit::Str_(token::intern(&val));
+                Some(TokenStream::from_tts(vec![TokenTree::Token(lit.span,
+                                                                 Token::Literal(val, None))]))
+            }
+            _ => None,
+        }
+
+    }
+
+    /// Convert a vector of TokenTrees into a parentheses-delimited TokenStream.
+    pub fn as_paren_delimited_stream(tts: Vec<TokenTree>) -> TokenStream {
+        let new_sp = covering_span(&tts);
+
+        let new_delim = Rc::new(Delimited {
+            delim: token::DelimToken::Paren,
+            open_span: DUMMY_SP,
+            tts: tts,
+            close_span: DUMMY_SP,
+        });
+
+        TokenStream::from_tts(vec![TokenTree::Delimited(new_sp, new_delim)])
+    }
+
+    /// Convert an interned string into a one-element TokenStream containing a
+    /// single identifier token with a `DUMMY_SP` span.
+    pub fn from_interned_string_as_ident(s: InternedString) -> TokenStream {
+        TokenStream::from_tts(vec![TokenTree::Token(DUMMY_SP,
+                                                    Token::Ident(token::str_to_ident(&s[..])))])
+    }
+}
+
+/// TokenSlices are 'views' of `TokenStream`s; they fit the same role as `str`s do for
+/// `String`s. In general, most TokenStream manipulations will be refocusing their internal
+/// contents by taking a TokenSlice and then using indexing and the provided operators.
+// Unsized newtype over a token-tree slice; only ever handled behind a reference.
+#[derive(PartialEq, Eq, Debug)]
+pub struct TokenSlice([TokenTree]);
+
+// Deref to `TokenSlice` so all slice-style inspection methods are available
+// directly on a `TokenStream` (mirroring `String` -> `str`).
+impl ops::Deref for TokenStream {
+    type Target = TokenSlice;
+
+    fn deref(&self) -> &TokenSlice {
+        let tts: &[TokenTree] = &*self.tts;
+        // SAFETY(review): casts `&[TokenTree]` to `&TokenSlice`, relying on
+        // `TokenSlice` being a newtype with the same layout as `[TokenTree]`.
+        // Without a `#[repr]` guarantee this is not formally promised by the
+        // language — confirm the layout assumption.
+        unsafe { mem::transmute(tts) }
+    }
+}
+
+impl TokenSlice {
+    /// Convert a borrowed TokenTree slice into a borrowed TokenSlice.
+    fn from_tts(tts: &[TokenTree]) -> &TokenSlice {
+        // SAFETY(review): same layout assumption as `TokenStream::deref` —
+        // `TokenSlice` must be layout-compatible with `[TokenTree]`; confirm.
+        unsafe { mem::transmute(tts) }
+    }
+
+    /// Indicates whether the `TokenSlice` is empty.
+    pub fn is_empty(&self) -> bool {
+        self.len() == 0
+    }
+
+    /// Return the `TokenSlice`'s length.
+    pub fn len(&self) -> usize {
+        self.0.len()
+    }
+
+    /// Check equality versus another TokenSlice, ignoring span information.
+    pub fn eq_unspanned(&self, other: &TokenSlice) -> bool {
+        if self.len() != other.len() {
+            return false;
+        }
+        // Element-wise span-insensitive comparison; lengths are already equal.
+        for (tt1, tt2) in self.iter().zip(other.iter()) {
+            if !tt1.eq_unspanned(tt2) {
+                return false;
+            }
+        }
+        true
+    }
+
+    /// Compute a span that covers the entire TokenSlice (e.g., one wide enough to include
+    /// the entire slice). If the inputs share expansion identification, it is preserved.
+    /// If they do not, it is discarded.
+    pub fn covering_span(&self) -> Span {
+        covering_span(&self.0)
+    }
+
+    /// Indicates whether the stream is of the form `= <ts>`, where `<ts>` is a continued
+    /// `TokenStream`.
+    pub fn is_assignment(&self) -> bool {
+        self.maybe_assignment().is_some()
+    }
+
+    /// Returns the RHS of an assignment.
+    // NOTE(review): this only checks that the slice has more than one element;
+    // it never verifies that the first token is actually `=`, so any stream of
+    // length >= 2 is treated as an assignment. Confirm whether callers rely on
+    // an upstream guarantee.
+    pub fn maybe_assignment(&self) -> Option<&TokenSlice> {
+        if !(self.len() > 1) {
+            return None;
+        }
+
+        Some(&self[1..])
+    }
+
+    /// Indicates whether the stream is a single, delimited expression (e.g., `(a,b,c)` or
+    /// `{a,b,c}`).
+    pub fn is_delimited(&self) -> bool {
+        self.maybe_delimited().is_some()
+    }
+
+    /// Returns the inside of the delimited term as a new TokenSlice. `None` unless
+    /// the slice is exactly one `Delimited` tree.
+    pub fn maybe_delimited(&self) -> Option<&TokenSlice> {
+        if !(self.len() == 1) {
+            return None;
+        }
+
+        match self[0] {
+            TokenTree::Delimited(_, ref rc) => Some(TokenSlice::from_tts(&*rc.tts)),
+            _ => None,
+        }
+    }
+
+    /// Returns a list of `TokenSlice`s if the stream is a delimited list, breaking the
+    /// stream on commas.
+    pub fn maybe_comma_list(&self) -> Option<Vec<&TokenSlice>> {
+        let maybe_tts = self.maybe_delimited();
+
+        let ts: &TokenSlice;
+        match maybe_tts {
+            Some(t) => {
+                ts = t;
+            }
+            None => {
+                // Not a single delimited group, so there is no comma list.
+                return None;
+            }
+        }
+
+        // Split on commas; dropping empty segments makes trailing (and repeated)
+        // commas harmless, e.g. `(foo,bar,)` yields two segments.
+        let splits: Vec<&TokenSlice> = ts.split(|x| match *x {
+                TokenTree::Token(_, Token::Comma) => true,
+                _ => false,
+            })
+            .filter(|x| x.len() > 0)
+            .collect();
+
+        Some(splits)
+    }
+
+    /// Returns a Nonterminal if it is Interpolated. `None` unless the slice is
+    /// exactly one interpolated token.
+    pub fn maybe_interpolated_nonterminal(&self) -> Option<Nonterminal> {
+        if !(self.len() == 1) {
+            return None;
+        }
+
+        match self[0] {
+            TokenTree::Token(_, Token::Interpolated(ref nt)) => Some(nt.clone()),
+            _ => None,
+        }
+    }
+
+    /// Indicates if the stream is exactly one identifier.
+    pub fn is_ident(&self) -> bool {
+        self.maybe_ident().is_some()
+    }
+
+    /// Returns an identifier if the slice is one ident token, looking through at
+    /// most one level of delimiters (so `(foo)` also yields `foo`).
+    pub fn maybe_ident(&self) -> Option<ast::Ident> {
+        if !(self.len() == 1) {
+            return None;
+        }
+
+        // Unwrap a single-element delimited group, if present.
+        let tok = if let Some(tts) = self.maybe_delimited() {
+            if tts.len() != 1 {
+                return None;
+            }
+            &tts[0]
+        } else {
+            &self[0]
+        };
+
+        match *tok {
+            TokenTree::Token(_, Token::Ident(t)) => Some(t),
+            _ => None,
+        }
+    }
+
+    /// Indicates if the stream is exactly one literal
+    pub fn is_lit(&self) -> bool {
+        self.maybe_lit().is_some()
+    }
+
+    /// Returns a literal if the slice is one literal token, looking through at
+    /// most one level of delimiters (mirroring `maybe_ident`).
+    pub fn maybe_lit(&self) -> Option<token::Lit> {
+        if !(self.len() == 1) {
+            return None;
+        }
+
+        // Unwrap a single-element delimited group, if present.
+        let tok = if let Some(tts) = self.maybe_delimited() {
+            if tts.len() != 1 {
+                return None;
+            }
+            &tts[0]
+        } else {
+            &self[0]
+        };
+
+        match *tok {
+            TokenTree::Token(_, Token::Literal(l, _)) => Some(l),
+            _ => None,
+        }
+    }
+
+    /// Returns an AST string literal if the TokenStream is either a normal ('cooked') or
+    /// raw string literal.
+    pub fn maybe_str(&self) -> Option<ast::Lit> {
+        if !(self.len() == 1) {
+            return None;
+        }
+
+        match self[0] {
+            TokenTree::Token(sp, Token::Literal(Lit::Str_(s), _)) => {
+                // Cooked string: unescape via `parse::str_lit` before re-interning.
+                let l = LitKind::Str(token::intern_and_get_ident(&parse::str_lit(&s.as_str())),
+                                     ast::StrStyle::Cooked);
+                Some(Spanned {
+                    node: l,
+                    span: sp,
+                })
+            }
+            TokenTree::Token(sp, Token::Literal(Lit::StrRaw(s, n), _)) => {
+                // Raw string: `n` is the number of `#`s in the raw delimiter.
+                let l = LitKind::Str(token::intern_and_get_ident(&parse::raw_str_lit(&s.as_str())),
+                                     ast::StrStyle::Raw(n));
+                Some(Spanned {
+                    node: l,
+                    span: sp,
+                })
+            }
+            _ => None,
+        }
+    }
+
+    /// This operation extracts the path prefix, returning an AST path struct and the remainder
+    /// of the stream (if it finds one). To be more specific, a tokenstream that has a valid,
+    /// non-global path as a prefix (eg `foo(bar, baz)`, `foo::bar(bar)`, but *not*
+    /// `::foo::bar(baz)`) will yield the path and the remaining tokens (as a slice). The previous
+    /// examples will yield
+    /// `Some((Path { segments = vec![foo], ... }, [(bar, baz)]))`,
+    /// `Some((Path { segments = vec![foo, bar] }, [(baz)]))`,
+    /// and `None`, respectively.
+    pub fn maybe_path_prefix(&self) -> Option<(ast::Path, &TokenSlice)> {
+        let mut segments: Vec<ast::PathSegment> = Vec::new();
+
+        // Greedily take the leading run of idents and `::` separators; validity
+        // of the alternation is checked by the state machine below.
+        let path: Vec<&TokenTree> = self.iter()
+            .take_while(|x| x.is_ident() || x.eq_token(Token::ModSep))
+            .collect::<Vec<&TokenTree>>();
+
+        let path_size = path.len();
+        if path_size == 0 {
+            return None;
+        }
+
+        let cov_span = self[..path_size].covering_span();
+        let rst = &self[path_size..];
+
+        let fst_id = path[0];
+
+        // A non-global path must start with an identifier (not `::`).
+        if let Some(id) = fst_id.maybe_ident() {
+            segments.push(ast::PathSegment {
+                identifier: id,
+                parameters: ast::PathParameters::none(),
+            });
+        } else {
+            return None;
+        }
+
+        // Let's use a state machine to parse out the rest.
+        enum State {
+            Mod, // Expect a `::`, or return None otherwise.
+            Ident, // Expect an ident, or return None otherwise.
+        }
+        let mut state = State::Mod;
+
+        for p in &path[1..] {
+            match state {
+                State::Mod => {
+                    // State 0: ['::' -> state 1, else return None]
+                    if p.eq_token(Token::ModSep) {
+                        state = State::Ident;
+                    } else {
+                        return None;
+                    }
+                }
+                State::Ident => {
+                    // State 1: [ident -> state 0, else return None]
+                    if let Some(id) = p.maybe_ident() {
+                        segments.push(ast::PathSegment {
+                            identifier: id,
+                            parameters: ast::PathParameters::none(),
+                        });
+                        state = State::Mod;
+                    } else {
+                        return None;
+                    }
+                }
+            }
+        }
+
+        let path = ast::Path {
+            span: cov_span,
+            global: false,
+            segments: segments,
+        };
+        Some((path, rst))
+    }
+
+    /// Returns an iterator over the individual `TokenTree`s of this TokenSlice.
+    fn iter(&self) -> Iter {
+        Iter { vs: self }
+    }
+
+    /// Splits a TokenSlice based on the provided `&TokenTree -> bool` predicate.
+    fn split<P>(&self, pred: P) -> Split<P>
+        where P: FnMut(&TokenTree) -> bool
+    {
+        Split {
+            vs: self,
+            pred: pred,
+            finished: false,
+        }
+    }
+}
+
+/// Iterator over the `TokenTree`s of a `TokenSlice`; created by `TokenSlice::iter`.
+pub struct Iter<'a> {
+    // Remaining, not-yet-yielded portion of the slice.
+    vs: &'a TokenSlice,
+}
+
+impl<'a> Iterator for Iter<'a> {
+    type Item = &'a TokenTree;
+
+    fn next(&mut self) -> Option<&'a TokenTree> {
+        if self.vs.is_empty() {
+            return None;
+        }
+
+        // Yield the head and shrink the view to the tail.
+        let ret = Some(&self.vs[0]);
+        self.vs = &self.vs[1..];
+        ret
+    }
+}
+
+/// Iterator of sub-slices separated by trees matching a predicate; created by
+/// `TokenSlice::split` (analogous to `slice::split`).
+pub struct Split<'a, P>
+    where P: FnMut(&TokenTree) -> bool
+{
+    // Remaining, not-yet-split portion of the slice.
+    vs: &'a TokenSlice,
+    // Separator predicate.
+    pred: P,
+    // Set once the trailing segment has been yielded.
+    finished: bool,
+}
+
+impl<'a, P> Iterator for Split<'a, P>
+    where P: FnMut(&TokenTree) -> bool
+{
+    type Item = &'a TokenSlice;
+
+    fn next(&mut self) -> Option<&'a TokenSlice> {
+        if self.finished {
+            return None;
+        }
+
+        match self.vs.iter().position(|x| (self.pred)(x)) {
+            None => {
+                // No separator left: yield the remainder (possibly empty) once,
+                // then terminate.
+                self.finished = true;
+                Some(&self.vs[..])
+            }
+            Some(idx) => {
+                // Yield everything before the separator and skip the separator
+                // itself.
+                let ret = Some(&self.vs[..idx]);
+                self.vs = &self.vs[idx + 1..];
+                ret
+            }
+        }
+    }
+}
+
+// Index and range-index implementations for `TokenStream`. Each one delegates
+// through the `Deref` impl (`&**self`) to the corresponding `TokenSlice` impl,
+// so streams can be indexed and sliced exactly like slices.
+impl Index<usize> for TokenStream {
+    type Output = TokenTree;
+
+    fn index(&self, index: usize) -> &TokenTree {
+        Index::index(&**self, index)
+    }
+}
+
+impl ops::Index<ops::Range<usize>> for TokenStream {
+    type Output = TokenSlice;
+
+    fn index(&self, index: ops::Range<usize>) -> &TokenSlice {
+        Index::index(&**self, index)
+    }
+}
+
+impl ops::Index<ops::RangeTo<usize>> for TokenStream {
+    type Output = TokenSlice;
+
+    fn index(&self, index: ops::RangeTo<usize>) -> &TokenSlice {
+        Index::index(&**self, index)
+    }
+}
+
+impl ops::Index<ops::RangeFrom<usize>> for TokenStream {
+    type Output = TokenSlice;
+
+    fn index(&self, index: ops::RangeFrom<usize>) -> &TokenSlice {
+        Index::index(&**self, index)
+    }
+}
+
+impl ops::Index<ops::RangeFull> for TokenStream {
+    type Output = TokenSlice;
+
+    fn index(&self, _index: ops::RangeFull) -> &TokenSlice {
+        Index::index(&**self, _index)
+    }
+}
+
+// Index and range-index implementations for `TokenSlice`, delegating to the
+// underlying `[TokenTree]` and re-wrapping sub-slices via `from_tts`.
+impl Index<usize> for TokenSlice {
+    type Output = TokenTree;
+
+    fn index(&self, index: usize) -> &TokenTree {
+        &self.0[index]
+    }
+}
+
+impl ops::Index<ops::Range<usize>> for TokenSlice {
+    type Output = TokenSlice;
+
+    fn index(&self, index: ops::Range<usize>) -> &TokenSlice {
+        TokenSlice::from_tts(&self.0[index])
+    }
+}
+
+impl ops::Index<ops::RangeTo<usize>> for TokenSlice {
+    type Output = TokenSlice;
+
+    fn index(&self, index: ops::RangeTo<usize>) -> &TokenSlice {
+        TokenSlice::from_tts(&self.0[index])
+    }
+}
+
+impl ops::Index<ops::RangeFrom<usize>> for TokenSlice {
+    type Output = TokenSlice;
+
+    fn index(&self, index: ops::RangeFrom<usize>) -> &TokenSlice {
+        TokenSlice::from_tts(&self.0[index])
+    }
+}
+
+impl ops::Index<ops::RangeFull> for TokenSlice {
+    type Output = TokenSlice;
+
+    fn index(&self, _index: ops::RangeFull) -> &TokenSlice {
+        TokenSlice::from_tts(&self.0[_index])
+    }
+}
+
+
+#[cfg(test)]
+mod tests {
+    // Unit tests for TokenStream/TokenSlice construction, inspection
+    // (is_*/maybe_* predicates), and paren-delimited wrapping. Inputs are
+    // parsed from source strings via `string_to_tts`, so spans are byte
+    // offsets into those strings.
+    use super::*;
+    use ast;
+    use syntax_pos::{Span, BytePos, NO_EXPANSION, DUMMY_SP};
+    use parse::token::{self, str_to_ident, Token, Lit};
+    use util::parser_testing::string_to_tts;
+    use std::rc::Rc;
+
+    /// Shorthand for building a test `Span` from raw byte offsets.
+    fn sp(a: u32, b: u32) -> Span {
+        Span {
+            lo: BytePos(a),
+            hi: BytePos(b),
+            expn_id: NO_EXPANSION,
+        }
+    }
+
+    #[test]
+    fn test_is_empty() {
+        let test0 = TokenStream::from_tts(Vec::new());
+        let test1 = TokenStream::from_tts(vec![TokenTree::Token(sp(0, 1),
+                                                                Token::Ident(str_to_ident("a")))]);
+        let test2 = TokenStream::from_tts(string_to_tts("foo(bar::baz)".to_string()));
+
+        assert_eq!(test0.is_empty(), true);
+        assert_eq!(test1.is_empty(), false);
+        assert_eq!(test2.is_empty(), false);
+    }
+
+    #[test]
+    fn test_is_delimited() {
+        let test0 = TokenStream::from_tts(string_to_tts("foo(bar::baz)".to_string()));
+        let test1 = TokenStream::from_tts(string_to_tts("(bar::baz)".to_string()));
+        let test2 = TokenStream::from_tts(string_to_tts("(foo,bar,baz)".to_string()));
+        let test3 = TokenStream::from_tts(string_to_tts("(foo,bar,baz)(zab,rab,oof)".to_string()));
+        let test4 = TokenStream::from_tts(string_to_tts("(foo,bar,baz)foo".to_string()));
+        let test5 = TokenStream::from_tts(string_to_tts("".to_string()));
+
+        assert_eq!(test0.is_delimited(), false);
+        assert_eq!(test1.is_delimited(), true);
+        assert_eq!(test2.is_delimited(), true);
+        assert_eq!(test3.is_delimited(), false);
+        assert_eq!(test4.is_delimited(), false);
+        assert_eq!(test5.is_delimited(), false);
+    }
+
+    #[test]
+    fn test_is_assign() {
+        let test0 = TokenStream::from_tts(string_to_tts("= bar::baz".to_string()));
+        let test1 = TokenStream::from_tts(string_to_tts("= \"5\"".to_string()));
+        let test2 = TokenStream::from_tts(string_to_tts("= 5".to_string()));
+        let test3 = TokenStream::from_tts(string_to_tts("(foo = 10)".to_string()));
+        let test4 = TokenStream::from_tts(string_to_tts("= (foo,bar,baz)".to_string()));
+        let test5 = TokenStream::from_tts(string_to_tts("".to_string()));
+
+        assert_eq!(test0.is_assignment(), true);
+        assert_eq!(test1.is_assignment(), true);
+        assert_eq!(test2.is_assignment(), true);
+        assert_eq!(test3.is_assignment(), false);
+        assert_eq!(test4.is_assignment(), true);
+        assert_eq!(test5.is_assignment(), false);
+    }
+
+    #[test]
+    fn test_is_lit() {
+        let test0 = TokenStream::from_tts(string_to_tts("\"foo\"".to_string()));
+        let test1 = TokenStream::from_tts(string_to_tts("5".to_string()));
+        let test2 = TokenStream::from_tts(string_to_tts("foo".to_string()));
+        let test3 = TokenStream::from_tts(string_to_tts("foo::bar".to_string()));
+        let test4 = TokenStream::from_tts(string_to_tts("foo(bar)".to_string()));
+
+        assert_eq!(test0.is_lit(), true);
+        assert_eq!(test1.is_lit(), true);
+        assert_eq!(test2.is_lit(), false);
+        assert_eq!(test3.is_lit(), false);
+        assert_eq!(test4.is_lit(), false);
+    }
+
+    #[test]
+    fn test_is_ident() {
+        let test0 = TokenStream::from_tts(string_to_tts("\"foo\"".to_string()));
+        let test1 = TokenStream::from_tts(string_to_tts("5".to_string()));
+        let test2 = TokenStream::from_tts(string_to_tts("foo".to_string()));
+        let test3 = TokenStream::from_tts(string_to_tts("foo::bar".to_string()));
+        let test4 = TokenStream::from_tts(string_to_tts("foo(bar)".to_string()));
+
+        assert_eq!(test0.is_ident(), false);
+        assert_eq!(test1.is_ident(), false);
+        assert_eq!(test2.is_ident(), true);
+        assert_eq!(test3.is_ident(), false);
+        assert_eq!(test4.is_ident(), false);
+    }
+
+    #[test]
+    fn test_maybe_assignment() {
+        let test0_input = TokenStream::from_tts(string_to_tts("= bar::baz".to_string()));
+        let test1_input = TokenStream::from_tts(string_to_tts("= \"5\"".to_string()));
+        let test2_input = TokenStream::from_tts(string_to_tts("= 5".to_string()));
+        let test3_input = TokenStream::from_tts(string_to_tts("(foo = 10)".to_string()));
+        let test4_input = TokenStream::from_tts(string_to_tts("= (foo,bar,baz)".to_string()));
+        let test5_input = TokenStream::from_tts(string_to_tts("".to_string()));
+
+        let test0 = test0_input.maybe_assignment();
+        let test1 = test1_input.maybe_assignment();
+        let test2 = test2_input.maybe_assignment();
+        let test3 = test3_input.maybe_assignment();
+        let test4 = test4_input.maybe_assignment();
+        let test5 = test5_input.maybe_assignment();
+
+        let test0_expected = TokenStream::from_tts(vec![TokenTree::Token(sp(2, 5),
+                                                        token::Ident(str_to_ident("bar"))),
+                                       TokenTree::Token(sp(5, 7), token::ModSep),
+                                       TokenTree::Token(sp(7, 10),
+                                                        token::Ident(str_to_ident("baz")))]);
+        assert_eq!(test0, Some(&test0_expected[..]));
+
+        let test1_expected = TokenStream::from_tts(vec![TokenTree::Token(sp(2, 5),
+                                            token::Literal(Lit::Str_(token::intern("5")), None))]);
+        assert_eq!(test1, Some(&test1_expected[..]));
+
+        let test2_expected = TokenStream::from_tts(vec![TokenTree::Token( sp(2,3)
+                                       , token::Literal(
+                                           Lit::Integer(
+                                             token::intern(&(5.to_string()))),
+                                             None))]);
+        assert_eq!(test2, Some(&test2_expected[..]));
+
+        assert_eq!(test3, None);
+
+
+        let test4_tts = vec![TokenTree::Token(sp(3, 6), token::Ident(str_to_ident("foo"))),
+                             TokenTree::Token(sp(6, 7), token::Comma),
+                             TokenTree::Token(sp(7, 10), token::Ident(str_to_ident("bar"))),
+                             TokenTree::Token(sp(10, 11), token::Comma),
+                             TokenTree::Token(sp(11, 14), token::Ident(str_to_ident("baz")))];
+
+        let test4_expected = TokenStream::from_tts(vec![TokenTree::Delimited(sp(2, 15),
+                                                Rc::new(Delimited {
+                                                    delim: token::DelimToken::Paren,
+                                                    open_span: sp(2, 3),
+                                                    tts: test4_tts,
+                                                    close_span: sp(14, 15),
+                                                }))]);
+        assert_eq!(test4, Some(&test4_expected[..]));
+
+        assert_eq!(test5, None);
+
+    }
+
+    #[test]
+    fn test_maybe_delimited() {
+        let test0_input = TokenStream::from_tts(string_to_tts("foo(bar::baz)".to_string()));
+        let test1_input = TokenStream::from_tts(string_to_tts("(bar::baz)".to_string()));
+        let test2_input = TokenStream::from_tts(string_to_tts("(foo,bar,baz)".to_string()));
+        let test3_input = TokenStream::from_tts(string_to_tts("(foo,bar,baz)(zab,rab)"
+            .to_string()));
+        let test4_input = TokenStream::from_tts(string_to_tts("(foo,bar,baz)foo".to_string()));
+        let test5_input = TokenStream::from_tts(string_to_tts("".to_string()));
+
+        let test0 = test0_input.maybe_delimited();
+        let test1 = test1_input.maybe_delimited();
+        let test2 = test2_input.maybe_delimited();
+        let test3 = test3_input.maybe_delimited();
+        let test4 = test4_input.maybe_delimited();
+        let test5 = test5_input.maybe_delimited();
+
+        assert_eq!(test0, None);
+
+        let test1_expected = TokenStream::from_tts(vec![TokenTree::Token(sp(1, 4),
+                                                        token::Ident(str_to_ident("bar"))),
+                                       TokenTree::Token(sp(4, 6), token::ModSep),
+                                       TokenTree::Token(sp(6, 9),
+                                                        token::Ident(str_to_ident("baz")))]);
+        assert_eq!(test1, Some(&test1_expected[..]));
+
+        let test2_expected = TokenStream::from_tts(vec![TokenTree::Token(sp(1, 4),
+                                                        token::Ident(str_to_ident("foo"))),
+                                       TokenTree::Token(sp(4, 5), token::Comma),
+                                       TokenTree::Token(sp(5, 8),
+                                                        token::Ident(str_to_ident("bar"))),
+                                       TokenTree::Token(sp(8, 9), token::Comma),
+                                       TokenTree::Token(sp(9, 12),
+                                                        token::Ident(str_to_ident("baz")))]);
+        assert_eq!(test2, Some(&test2_expected[..]));
+
+        assert_eq!(test3, None);
+
+        assert_eq!(test4, None);
+
+        assert_eq!(test5, None);
+    }
+
+    #[test]
+    fn test_maybe_comma_list() {
+        let test0_input = TokenStream::from_tts(string_to_tts("foo(bar::baz)".to_string()));
+        let test1_input = TokenStream::from_tts(string_to_tts("(bar::baz)".to_string()));
+        let test2_input = TokenStream::from_tts(string_to_tts("(foo,bar,baz)".to_string()));
+        let test3_input = TokenStream::from_tts(string_to_tts("(foo::bar,bar,baz)".to_string()));
+        let test4_input = TokenStream::from_tts(string_to_tts("(foo,bar,baz)(zab,rab)"
+            .to_string()));
+        let test5_input = TokenStream::from_tts(string_to_tts("(foo,bar,baz)foo".to_string()));
+        let test6_input = TokenStream::from_tts(string_to_tts("".to_string()));
+        // The following is supported behavior!
+        let test7_input = TokenStream::from_tts(string_to_tts("(foo,bar,)".to_string()));
+
+        let test0 = test0_input.maybe_comma_list();
+        let test1 = test1_input.maybe_comma_list();
+        let test2 = test2_input.maybe_comma_list();
+        let test3 = test3_input.maybe_comma_list();
+        let test4 = test4_input.maybe_comma_list();
+        let test5 = test5_input.maybe_comma_list();
+        let test6 = test6_input.maybe_comma_list();
+        let test7 = test7_input.maybe_comma_list();
+
+        assert_eq!(test0, None);
+
+        let test1_stream = TokenStream::from_tts(vec![TokenTree::Token(sp(1, 4),
+                                                        token::Ident(str_to_ident("bar"))),
+                                       TokenTree::Token(sp(4, 6), token::ModSep),
+                                       TokenTree::Token(sp(6, 9),
+                                                        token::Ident(str_to_ident("baz")))]);
+
+        let test1_expected: Vec<&TokenSlice> = vec![&test1_stream[..]];
+        assert_eq!(test1, Some(test1_expected));
+
+        let test2_foo = TokenStream::from_tts(vec![TokenTree::Token(sp(1, 4),
+                                                        token::Ident(str_to_ident("foo")))]);
+        let test2_bar = TokenStream::from_tts(vec![TokenTree::Token(sp(5, 8),
+                                                        token::Ident(str_to_ident("bar")))]);
+        let test2_baz = TokenStream::from_tts(vec![TokenTree::Token(sp(9, 12),
+                                                        token::Ident(str_to_ident("baz")))]);
+        let test2_expected: Vec<&TokenSlice> = vec![&test2_foo[..], &test2_bar[..], &test2_baz[..]];
+        assert_eq!(test2, Some(test2_expected));
+
+        let test3_path = TokenStream::from_tts(vec![TokenTree::Token(sp(1, 4),
+                                                        token::Ident(str_to_ident("foo"))),
+                                       TokenTree::Token(sp(4, 6), token::ModSep),
+                                       TokenTree::Token(sp(6, 9),
+                                                        token::Ident(str_to_ident("bar")))]);
+        let test3_bar = TokenStream::from_tts(vec![TokenTree::Token(sp(10, 13),
+                                                        token::Ident(str_to_ident("bar")))]);
+        let test3_baz = TokenStream::from_tts(vec![TokenTree::Token(sp(14, 17),
+                                                        token::Ident(str_to_ident("baz")))]);
+        let test3_expected: Vec<&TokenSlice> =
+            vec![&test3_path[..], &test3_bar[..], &test3_baz[..]];
+        assert_eq!(test3, Some(test3_expected));
+
+        assert_eq!(test4, None);
+
+        assert_eq!(test5, None);
+
+        assert_eq!(test6, None);
+
+
+        let test7_expected: Vec<&TokenSlice> = vec![&test2_foo[..], &test2_bar[..]];
+        assert_eq!(test7, Some(test7_expected));
+    }
+
+    // pub fn maybe_ident(&self) -> Option<ast::Ident>
+    #[test]
+    fn test_maybe_ident() {
+        let test0 = TokenStream::from_tts(string_to_tts("\"foo\"".to_string())).maybe_ident();
+        let test1 = TokenStream::from_tts(string_to_tts("5".to_string())).maybe_ident();
+        let test2 = TokenStream::from_tts(string_to_tts("foo".to_string())).maybe_ident();
+        let test3 = TokenStream::from_tts(string_to_tts("foo::bar".to_string())).maybe_ident();
+        let test4 = TokenStream::from_tts(string_to_tts("foo(bar)".to_string())).maybe_ident();
+
+        assert_eq!(test0, None);
+        assert_eq!(test1, None);
+        assert_eq!(test2, Some(str_to_ident("foo")));
+        assert_eq!(test3, None);
+        assert_eq!(test4, None);
+    }
+
+    // pub fn maybe_lit(&self) -> Option<token::Lit>
+    #[test]
+    fn test_maybe_lit() {
+        let test0 = TokenStream::from_tts(string_to_tts("\"foo\"".to_string())).maybe_lit();
+        let test1 = TokenStream::from_tts(string_to_tts("5".to_string())).maybe_lit();
+        let test2 = TokenStream::from_tts(string_to_tts("foo".to_string())).maybe_lit();
+        let test3 = TokenStream::from_tts(string_to_tts("foo::bar".to_string())).maybe_lit();
+        let test4 = TokenStream::from_tts(string_to_tts("foo(bar)".to_string())).maybe_lit();
+
+        assert_eq!(test0, Some(Lit::Str_(token::intern("foo"))));
+        assert_eq!(test1, Some(Lit::Integer(token::intern(&(5.to_string())))));
+        assert_eq!(test2, None);
+        assert_eq!(test3, None);
+        assert_eq!(test4, None);
+    }
+
+    #[test]
+    fn test_maybe_path_prefix() {
+        let test0_input = TokenStream::from_tts(string_to_tts("foo(bar::baz)".to_string()));
+        let test1_input = TokenStream::from_tts(string_to_tts("(bar::baz)".to_string()));
+        let test2_input = TokenStream::from_tts(string_to_tts("(foo,bar,baz)".to_string()));
+        let test3_input = TokenStream::from_tts(string_to_tts("foo::bar(bar,baz)".to_string()));
+
+        let test0 = test0_input.maybe_path_prefix();
+        let test1 = test1_input.maybe_path_prefix();
+        let test2 = test2_input.maybe_path_prefix();
+        let test3 = test3_input.maybe_path_prefix();
+
+        let test0_tts = vec![TokenTree::Token(sp(4, 7), token::Ident(str_to_ident("bar"))),
+                             TokenTree::Token(sp(7, 9), token::ModSep),
+                             TokenTree::Token(sp(9, 12), token::Ident(str_to_ident("baz")))];
+
+        let test0_stream = TokenStream::from_tts(vec![TokenTree::Delimited(sp(3, 13),
+                                                               Rc::new(Delimited {
+                                                                   delim: token::DelimToken::Paren,
+                                                                   open_span: sp(3, 4),
+                                                                   tts: test0_tts,
+                                                                   close_span: sp(12, 13),
+                                                               }))]);
+
+        let test0_expected = Some((ast::Path::from_ident(sp(0, 3), str_to_ident("foo")),
+                                   &test0_stream[..]));
+        assert_eq!(test0, test0_expected);
+
+        assert_eq!(test1, None);
+        assert_eq!(test2, None);
+
+        let test3_path = ast::Path {
+            span: sp(0, 8),
+            global: false,
+            segments: vec![ast::PathSegment {
+                               identifier: str_to_ident("foo"),
+                               parameters: ast::PathParameters::none(),
+                           },
+                           ast::PathSegment {
+                               identifier: str_to_ident("bar"),
+                               parameters: ast::PathParameters::none(),
+                           }],
+        };
+
+        let test3_tts = vec![TokenTree::Token(sp(9, 12), token::Ident(str_to_ident("bar"))),
+                             TokenTree::Token(sp(12, 13), token::Comma),
+                             TokenTree::Token(sp(13, 16), token::Ident(str_to_ident("baz")))];
+
+        let test3_stream = TokenStream::from_tts(vec![TokenTree::Delimited(sp(8, 17),
+                                                               Rc::new(Delimited {
+                                                                   delim: token::DelimToken::Paren,
+                                                                   open_span: sp(8, 9),
+                                                                   tts: test3_tts,
+                                                                   close_span: sp(16, 17),
+                                                               }))]);
+        let test3_expected = Some((test3_path, &test3_stream[..]));
+        assert_eq!(test3, test3_expected);
+    }
+
+    #[test]
+    fn test_as_paren_delimited_stream() {
+        let test0 = TokenStream::as_paren_delimited_stream(string_to_tts("foo,bar,".to_string()));
+        let test1 = TokenStream::as_paren_delimited_stream(string_to_tts("baz(foo,bar)"
+            .to_string()));
+
+        let test0_tts = vec![TokenTree::Token(sp(0, 3), token::Ident(str_to_ident("foo"))),
+                             TokenTree::Token(sp(3, 4), token::Comma),
+                             TokenTree::Token(sp(4, 7), token::Ident(str_to_ident("bar"))),
+                             TokenTree::Token(sp(7, 8), token::Comma)];
+        let test0_stream = TokenStream::from_tts(vec![TokenTree::Delimited(sp(0, 8),
+                                                               Rc::new(Delimited {
+                                                                   delim: token::DelimToken::Paren,
+                                                                   open_span: DUMMY_SP,
+                                                                   tts: test0_tts,
+                                                                   close_span: DUMMY_SP,
+                                                               }))]);
+
+        assert_eq!(test0, test0_stream);
+
+
+        let test1_tts = vec![TokenTree::Token(sp(4, 7), token::Ident(str_to_ident("foo"))),
+                             TokenTree::Token(sp(7, 8), token::Comma),
+                             TokenTree::Token(sp(8, 11), token::Ident(str_to_ident("bar")))];
+
+        let test1_parse = vec![TokenTree::Token(sp(0, 3), token::Ident(str_to_ident("baz"))),
+                               TokenTree::Delimited(sp(3, 12),
+                                                    Rc::new(Delimited {
+                                                        delim: token::DelimToken::Paren,
+                                                        open_span: sp(3, 4),
+                                                        tts: test1_tts,
+                                                        close_span: sp(11, 12),
+                                                    }))];
+
+        let test1_stream = TokenStream::from_tts(vec![TokenTree::Delimited(sp(0, 12),
+                                                               Rc::new(Delimited {
+                                                                   delim: token::DelimToken::Paren,
+                                                                   open_span: DUMMY_SP,
+                                                                   tts: test1_parse,
+                                                                   close_span: DUMMY_SP,
+                                                               }))]);
+
+        assert_eq!(test1, test1_stream);
+    }
+
+}
index 56a8c28ffedc2fe6547bca57842d320da96d388a..9cf456062385fcce660b6595b30e20594eaa5e20 100644 (file)
@@ -192,7 +192,10 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[tokenstream::Token
 
                     if OPTIONS.iter().any(|&opt| s == opt) {
                         cx.span_warn(p.last_span, "expected a clobber, found an option");
+                    } else if s.starts_with("{") || s.ends_with("}") {
+                        cx.span_err(p.last_span, "clobber should not be surrounded by braces");
                     }
+
                     clobs.push(s);
                 }
             }
index 80ad955b60b3ac02d0462a4a65fcea597d0ebfb1..7ca76af03bb04659562890d6b4f223fffe0d748f 160000 (submodule)
--- a/src/llvm
+++ b/src/llvm
@@ -1 +1 @@
-Subproject commit 80ad955b60b3ac02d0462a4a65fcea597d0ebfb1
+Subproject commit 7ca76af03bb04659562890d6b4f223fffe0d748f
index 4017c3856c465a77672945d4f81420dd80f53ad9..1953fc5a6b48c3c9008973e89e61f095263bd486 100644 (file)
@@ -1,4 +1,4 @@
 # If this file is modified, then llvm will be forcibly cleaned and then rebuilt.
 # The actual contents of this file do not matter, but to trigger a change on the
 # build bots then the contents should be changed so git updates the mtime.
-2016-04-28
+2016-06-23
index 546bb235a5f501c4b67f68d2482bf1a899d361ae..2b5ac7e8d80dec5efd06e5c570a74780f4216305 100644 (file)
@@ -8,6 +8,11 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
+// In the current version of the collector that still has to support
+// legacy-trans, closures do not generate their own TransItems, so we are
+// ignoring this test until MIR trans has taken over completely
+// ignore-test
+
 // ignore-tidy-linelength
 // compile-flags:-Zprint-trans-items=eager
 
index ba77266d07248213059a742f7c85efe75c801362..278e9189dd6a72b8cc2ffdb3dfa6def8557111e9 100644 (file)
@@ -8,6 +8,11 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
+// In the current version of the collector that still has to support
+// legacy-trans, closures do not generate their own TransItems, so we are
+// ignoring this test until MIR trans has taken over completely
+// ignore-test
+
 // ignore-tidy-linelength
 // compile-flags:-Zprint-trans-items=eager
 
index 5262d31ae0dcabbbd4db466d61f692b73b48a860..910ffd2959ed089400ec7691b95710e808db1631 100644 (file)
 // aux-build:cgu_extern_drop_glue.rs
 extern crate cgu_extern_drop_glue;
 
-//~ TRANS_ITEM drop-glue cgu_extern_drop_glue::Struct[0] @@ extern_drop_glue[OnceODR] extern_drop_glue-mod1[OnceODR]
-//~ TRANS_ITEM drop-glue-contents cgu_extern_drop_glue::Struct[0] @@ extern_drop_glue[OnceODR] extern_drop_glue-mod1[OnceODR]
+//~ TRANS_ITEM drop-glue cgu_extern_drop_glue::Struct[0] @@ extern_drop_glue[Internal] extern_drop_glue-mod1[Internal]
+//~ TRANS_ITEM drop-glue-contents cgu_extern_drop_glue::Struct[0] @@ extern_drop_glue[Internal] extern_drop_glue-mod1[Internal]
 
 struct LocalStruct(cgu_extern_drop_glue::Struct);
 
-//~ TRANS_ITEM fn extern_drop_glue::user[0] @@ extern_drop_glue[WeakODR]
+//~ TRANS_ITEM fn extern_drop_glue::user[0] @@ extern_drop_glue[External]
 fn user()
 {
-    //~ TRANS_ITEM drop-glue extern_drop_glue::LocalStruct[0] @@ extern_drop_glue[OnceODR]
+    //~ TRANS_ITEM drop-glue extern_drop_glue::LocalStruct[0] @@ extern_drop_glue[Internal]
     let _ = LocalStruct(cgu_extern_drop_glue::Struct(0));
 }
 
@@ -37,10 +37,10 @@ mod mod1 {
 
     struct LocalStruct(cgu_extern_drop_glue::Struct);
 
-    //~ TRANS_ITEM fn extern_drop_glue::mod1[0]::user[0] @@ extern_drop_glue-mod1[WeakODR]
+    //~ TRANS_ITEM fn extern_drop_glue::mod1[0]::user[0] @@ extern_drop_glue-mod1[External]
     fn user()
     {
-        //~ TRANS_ITEM drop-glue extern_drop_glue::mod1[0]::LocalStruct[0] @@ extern_drop_glue-mod1[OnceODR]
+        //~ TRANS_ITEM drop-glue extern_drop_glue::mod1[0]::LocalStruct[0] @@ extern_drop_glue-mod1[Internal]
         let _ = LocalStruct(cgu_extern_drop_glue::Struct(0));
     }
 }
index 6beed231df993704c3472b8e72c7e47030db173b..58f904f48a17dad25d5d4625182610bec2fbffce 100644 (file)
@@ -19,7 +19,7 @@
 // aux-build:cgu_generic_function.rs
 extern crate cgu_generic_function;
 
-//~ TRANS_ITEM fn extern_generic::user[0] @@ extern_generic[WeakODR]
+//~ TRANS_ITEM fn extern_generic::user[0] @@ extern_generic[External]
 fn user() {
     let _ = cgu_generic_function::foo("abc");
 }
@@ -27,7 +27,7 @@ fn user() {
 mod mod1 {
     use cgu_generic_function;
 
-    //~ TRANS_ITEM fn extern_generic::mod1[0]::user[0] @@ extern_generic-mod1[WeakODR]
+    //~ TRANS_ITEM fn extern_generic::mod1[0]::user[0] @@ extern_generic-mod1[External]
     fn user() {
         let _ = cgu_generic_function::foo("abc");
     }
@@ -35,7 +35,7 @@ fn user() {
     mod mod1 {
         use cgu_generic_function;
 
-        //~ TRANS_ITEM fn extern_generic::mod1[0]::mod1[0]::user[0] @@ extern_generic-mod1-mod1[WeakODR]
+        //~ TRANS_ITEM fn extern_generic::mod1[0]::mod1[0]::user[0] @@ extern_generic-mod1-mod1[External]
         fn user() {
             let _ = cgu_generic_function::foo("abc");
         }
@@ -45,20 +45,20 @@ fn user() {
 mod mod2 {
     use cgu_generic_function;
 
-    //~ TRANS_ITEM fn extern_generic::mod2[0]::user[0] @@ extern_generic-mod2[WeakODR]
+    //~ TRANS_ITEM fn extern_generic::mod2[0]::user[0] @@ extern_generic-mod2[External]
     fn user() {
         let _ = cgu_generic_function::foo("abc");
     }
 }
 
 mod mod3 {
-    //~ TRANS_ITEM fn extern_generic::mod3[0]::non_user[0] @@ extern_generic-mod3[WeakODR]
+    //~ TRANS_ITEM fn extern_generic::mod3[0]::non_user[0] @@ extern_generic-mod3[External]
     fn non_user() {}
 }
 
 // Make sure the two generic functions from the extern crate get instantiated
 // privately in every module they are use in.
-//~ TRANS_ITEM fn cgu_generic_function::foo[0]<&str> @@ extern_generic[OnceODR] extern_generic-mod1[OnceODR] extern_generic-mod2[OnceODR] extern_generic-mod1-mod1[OnceODR]
-//~ TRANS_ITEM fn cgu_generic_function::bar[0]<&str> @@ extern_generic[OnceODR] extern_generic-mod1[OnceODR] extern_generic-mod2[OnceODR] extern_generic-mod1-mod1[OnceODR]
+//~ TRANS_ITEM fn cgu_generic_function::foo[0]<&str> @@ extern_generic[Internal] extern_generic-mod1[Internal] extern_generic-mod2[Internal] extern_generic-mod1-mod1[Internal]
+//~ TRANS_ITEM fn cgu_generic_function::bar[0]<&str> @@ extern_generic[Internal] extern_generic-mod1[Internal] extern_generic-mod2[Internal] extern_generic-mod1-mod1[Internal]
 
 //~ TRANS_ITEM drop-glue i8
index 967824f31d0456279a5396cded3edfafa7926741..118513f65541b30e046a4c8a882b368e28eb19e3 100644 (file)
 // This test makes sure that items inlined from external crates are privately
 // instantiated in every codegen unit they are used in.
 
-//~ TRANS_ITEM fn cgu_explicit_inlining::inlined[0] @@ inlining_from_extern_crate[OnceODR] inlining_from_extern_crate-mod1[OnceODR]
-//~ TRANS_ITEM fn cgu_explicit_inlining::always_inlined[0] @@ inlining_from_extern_crate[OnceODR] inlining_from_extern_crate-mod2[OnceODR]
+//~ TRANS_ITEM fn cgu_explicit_inlining::inlined[0] @@ inlining_from_extern_crate[Internal] inlining_from_extern_crate-mod1[Internal]
+//~ TRANS_ITEM fn cgu_explicit_inlining::always_inlined[0] @@ inlining_from_extern_crate[Internal] inlining_from_extern_crate-mod2[Internal]
 
-//~ TRANS_ITEM fn inlining_from_extern_crate::user[0] @@ inlining_from_extern_crate[WeakODR]
+//~ TRANS_ITEM fn inlining_from_extern_crate::user[0] @@ inlining_from_extern_crate[External]
 pub fn user()
 {
     cgu_explicit_inlining::inlined();
@@ -37,7 +37,7 @@ pub fn user()
 mod mod1 {
     use cgu_explicit_inlining;
 
-    //~ TRANS_ITEM fn inlining_from_extern_crate::mod1[0]::user[0] @@ inlining_from_extern_crate-mod1[WeakODR]
+    //~ TRANS_ITEM fn inlining_from_extern_crate::mod1[0]::user[0] @@ inlining_from_extern_crate-mod1[External]
     pub fn user()
     {
         cgu_explicit_inlining::inlined();
@@ -50,7 +50,7 @@ pub fn user()
 mod mod2 {
     use cgu_explicit_inlining;
 
-    //~ TRANS_ITEM fn inlining_from_extern_crate::mod2[0]::user[0] @@ inlining_from_extern_crate-mod2[WeakODR]
+    //~ TRANS_ITEM fn inlining_from_extern_crate::mod2[0]::user[0] @@ inlining_from_extern_crate-mod2[External]
     pub fn user()
     {
         cgu_explicit_inlining::always_inlined();
index 04ebef645ec9836ec648b279ec1548f30128b070..f61e3fe12931e0ddb6041ca5d240f3f804b01236 100644 (file)
 #![allow(dead_code)]
 #![crate_type="lib"]
 
-//~ TRANS_ITEM drop-glue local_drop_glue::Struct[0] @@ local_drop_glue[OnceODR] local_drop_glue-mod1[OnceODR]
-//~ TRANS_ITEM drop-glue-contents local_drop_glue::Struct[0] @@ local_drop_glue[OnceODR] local_drop_glue-mod1[OnceODR]
+//~ TRANS_ITEM drop-glue local_drop_glue::Struct[0] @@ local_drop_glue[Internal] local_drop_glue-mod1[Internal]
+//~ TRANS_ITEM drop-glue-contents local_drop_glue::Struct[0] @@ local_drop_glue[Internal] local_drop_glue-mod1[Internal]
 struct Struct {
     _a: u32
 }
 
 impl Drop for Struct {
-    //~ TRANS_ITEM fn local_drop_glue::{{impl}}[0]::drop[0] @@ local_drop_glue[WeakODR]
+    //~ TRANS_ITEM fn local_drop_glue::{{impl}}[0]::drop[0] @@ local_drop_glue[External]
     fn drop(&mut self) {}
 }
 
-//~ TRANS_ITEM drop-glue local_drop_glue::Outer[0] @@ local_drop_glue[OnceODR]
+//~ TRANS_ITEM drop-glue local_drop_glue::Outer[0] @@ local_drop_glue[Internal]
 struct Outer {
     _a: Struct
 }
 
-//~ TRANS_ITEM fn local_drop_glue::user[0] @@ local_drop_glue[WeakODR]
+//~ TRANS_ITEM fn local_drop_glue::user[0] @@ local_drop_glue[External]
 fn user()
 {
     let _ = Outer {
@@ -46,14 +46,14 @@ mod mod1
 {
     use super::Struct;
 
-    //~ TRANS_ITEM drop-glue local_drop_glue::mod1[0]::Struct2[0] @@ local_drop_glue-mod1[OnceODR]
+    //~ TRANS_ITEM drop-glue local_drop_glue::mod1[0]::Struct2[0] @@ local_drop_glue-mod1[Internal]
     struct Struct2 {
         _a: Struct,
-        //~ TRANS_ITEM drop-glue (u32, local_drop_glue::Struct[0]) @@ local_drop_glue-mod1[OnceODR]
+        //~ TRANS_ITEM drop-glue (u32, local_drop_glue::Struct[0]) @@ local_drop_glue-mod1[Internal]
         _b: (u32, Struct),
     }
 
-    //~ TRANS_ITEM fn local_drop_glue::mod1[0]::user[0] @@ local_drop_glue-mod1[WeakODR]
+    //~ TRANS_ITEM fn local_drop_glue::mod1[0]::user[0] @@ local_drop_glue-mod1[External]
     fn user()
     {
         let _ = Struct2 {
index f5641f1f2ed7376210ca0a73e2bc34f11c3a840b..2d744169d3f8e5800a086956b8201953ba66347c 100644 (file)
 #![allow(dead_code)]
 #![crate_type="lib"]
 
-// Used in different modules/codegen units but always instantiated in the same
-// codegen unit.
-
-//~ TRANS_ITEM fn local_generic::generic[0]<u32> @@ local_generic.volatile[WeakODR]
-//~ TRANS_ITEM fn local_generic::generic[0]<u64> @@ local_generic.volatile[WeakODR]
-//~ TRANS_ITEM fn local_generic::generic[0]<char> @@ local_generic.volatile[WeakODR]
-//~ TRANS_ITEM fn local_generic::generic[0]<&str> @@ local_generic.volatile[WeakODR]
+//~ TRANS_ITEM fn local_generic::generic[0]<u32> @@ local_generic[Internal]
+//~ TRANS_ITEM fn local_generic::generic[0]<u64> @@ local_generic-mod1[Internal]
+//~ TRANS_ITEM fn local_generic::generic[0]<char> @@ local_generic-mod1-mod1[Internal]
+//~ TRANS_ITEM fn local_generic::generic[0]<&str> @@ local_generic-mod2[Internal]
 pub fn generic<T>(x: T) -> T { x }
 
-//~ TRANS_ITEM fn local_generic::user[0] @@ local_generic[WeakODR]
+//~ TRANS_ITEM fn local_generic::user[0] @@ local_generic[External]
 fn user() {
     let _ = generic(0u32);
 }
@@ -33,7 +30,7 @@ fn user() {
 mod mod1 {
     pub use super::generic;
 
-    //~ TRANS_ITEM fn local_generic::mod1[0]::user[0] @@ local_generic-mod1[WeakODR]
+    //~ TRANS_ITEM fn local_generic::mod1[0]::user[0] @@ local_generic-mod1[External]
     fn user() {
         let _ = generic(0u64);
     }
@@ -41,7 +38,7 @@ fn user() {
     mod mod1 {
         use super::generic;
 
-        //~ TRANS_ITEM fn local_generic::mod1[0]::mod1[0]::user[0] @@ local_generic-mod1-mod1[WeakODR]
+        //~ TRANS_ITEM fn local_generic::mod1[0]::mod1[0]::user[0] @@ local_generic-mod1-mod1[External]
         fn user() {
             let _ = generic('c');
         }
@@ -51,7 +48,7 @@ fn user() {
 mod mod2 {
     use super::generic;
 
-    //~ TRANS_ITEM fn local_generic::mod2[0]::user[0] @@ local_generic-mod2[WeakODR]
+    //~ TRANS_ITEM fn local_generic::mod2[0]::user[0] @@ local_generic-mod2[External]
     fn user() {
         let _ = generic("abc");
     }
index 880cc0a4fb47a003c75cbb719825687b1aab2044..5eb1cbc2199f759bccd5003c3275b0c1e7417d5c 100644 (file)
@@ -19,7 +19,7 @@
 mod inline {
 
     // Important: This function should show up in all codegen units where it is inlined
-    //~ TRANS_ITEM fn local_inlining::inline[0]::inlined_function[0] @@ local_inlining-inline[WeakODR] local_inlining-user1[Available] local_inlining-user2[Available]
+    //~ TRANS_ITEM fn local_inlining::inline[0]::inlined_function[0] @@ local_inlining-inline[External] local_inlining-user1[Available] local_inlining-user2[Available]
     #[inline(always)]
     pub fn inlined_function()
     {
@@ -30,7 +30,7 @@ pub fn inlined_function()
 mod user1 {
     use super::inline;
 
-    //~ TRANS_ITEM fn local_inlining::user1[0]::foo[0] @@ local_inlining-user1[WeakODR]
+    //~ TRANS_ITEM fn local_inlining::user1[0]::foo[0] @@ local_inlining-user1[External]
     fn foo() {
         inline::inlined_function();
     }
@@ -39,7 +39,7 @@ fn foo() {
 mod user2 {
     use super::inline;
 
-    //~ TRANS_ITEM fn local_inlining::user2[0]::bar[0] @@ local_inlining-user2[WeakODR]
+    //~ TRANS_ITEM fn local_inlining::user2[0]::bar[0] @@ local_inlining-user2[External]
     fn bar() {
         inline::inlined_function();
     }
@@ -47,7 +47,7 @@ fn bar() {
 
 mod non_user {
 
-    //~ TRANS_ITEM fn local_inlining::non_user[0]::baz[0] @@ local_inlining-non_user[WeakODR]
+    //~ TRANS_ITEM fn local_inlining::non_user[0]::baz[0] @@ local_inlining-non_user[External]
     fn baz() {
 
     }
index f3efa2587d3d5d3f7518e6294d52a0c91f97e5cb..28c4698eabd1fe636f802e828447d433cbffb067 100644 (file)
@@ -18,7 +18,7 @@
 
 mod inline {
 
-    //~ TRANS_ITEM fn local_transitive_inlining::inline[0]::inlined_function[0] @@ local_transitive_inlining-inline[WeakODR] local_transitive_inlining-direct_user[Available] local_transitive_inlining-indirect_user[Available]
+    //~ TRANS_ITEM fn local_transitive_inlining::inline[0]::inlined_function[0] @@ local_transitive_inlining-inline[External] local_transitive_inlining-direct_user[Available] local_transitive_inlining-indirect_user[Available]
     #[inline(always)]
     pub fn inlined_function()
     {
@@ -29,7 +29,7 @@ pub fn inlined_function()
 mod direct_user {
     use super::inline;
 
-    //~ TRANS_ITEM fn local_transitive_inlining::direct_user[0]::foo[0] @@ local_transitive_inlining-direct_user[WeakODR] local_transitive_inlining-indirect_user[Available]
+    //~ TRANS_ITEM fn local_transitive_inlining::direct_user[0]::foo[0] @@ local_transitive_inlining-direct_user[External] local_transitive_inlining-indirect_user[Available]
     #[inline(always)]
     pub fn foo() {
         inline::inlined_function();
@@ -39,7 +39,7 @@ pub fn foo() {
 mod indirect_user {
     use super::direct_user;
 
-    //~ TRANS_ITEM fn local_transitive_inlining::indirect_user[0]::bar[0] @@ local_transitive_inlining-indirect_user[WeakODR]
+    //~ TRANS_ITEM fn local_transitive_inlining::indirect_user[0]::bar[0] @@ local_transitive_inlining-indirect_user[External]
     fn bar() {
         direct_user::foo();
     }
@@ -47,7 +47,7 @@ fn bar() {
 
 mod non_user {
 
-    //~ TRANS_ITEM fn local_transitive_inlining::non_user[0]::baz[0] @@ local_transitive_inlining-non_user[WeakODR]
+    //~ TRANS_ITEM fn local_transitive_inlining::non_user[0]::baz[0] @@ local_transitive_inlining-non_user[External]
     fn baz() {
 
     }
index 99dda0e38bad7aeb63ac64262a91136f9f4b1fcf..1ea5aafd401d20cff73e7d507e10e0e111510e1b 100644 (file)
@@ -8,6 +8,11 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
+// Currently, all generic functions are instantiated in each codegen unit that
+// uses them, even those not marked with #[inline], so this test does not make
+// much sense at the moment.
+// ignore-test
+
 // ignore-tidy-linelength
 // We specify -Z incremental here because we want to test the partitioning for
 // incremental compilation
@@ -25,10 +30,10 @@ mod mod1 {
     // Even though the impl is in `mod1`, the methods should end up in the
     // parent module, since that is where their self-type is.
     impl SomeType {
-        //~ TRANS_ITEM fn methods_are_with_self_type::mod1[0]::{{impl}}[0]::method[0] @@ methods_are_with_self_type[WeakODR]
+        //~ TRANS_ITEM fn methods_are_with_self_type::mod1[0]::{{impl}}[0]::method[0] @@ methods_are_with_self_type[External]
         fn method(&self) {}
 
-        //~ TRANS_ITEM fn methods_are_with_self_type::mod1[0]::{{impl}}[0]::associated_fn[0] @@ methods_are_with_self_type[WeakODR]
+        //~ TRANS_ITEM fn methods_are_with_self_type::mod1[0]::{{impl}}[0]::associated_fn[0] @@ methods_are_with_self_type[External]
         fn associated_fn() {}
     }
 
index c3af86f820f18f20a3045e14bd95607188fca004..4da64110321688400ba7b734460abd4b5e344ada 100644 (file)
 #![allow(dead_code)]
 #![crate_type="lib"]
 
-//~ TRANS_ITEM fn regular_modules::foo[0] @@ regular_modules[WeakODR]
+//~ TRANS_ITEM fn regular_modules::foo[0] @@ regular_modules[External]
 fn foo() {}
 
-//~ TRANS_ITEM fn regular_modules::bar[0] @@ regular_modules[WeakODR]
+//~ TRANS_ITEM fn regular_modules::bar[0] @@ regular_modules[External]
 fn bar() {}
 
 //~ TRANS_ITEM static regular_modules::BAZ[0] @@ regular_modules[External]
@@ -27,26 +27,26 @@ fn bar() {}
 
 mod mod1 {
 
-    //~ TRANS_ITEM fn regular_modules::mod1[0]::foo[0] @@ regular_modules-mod1[WeakODR]
+    //~ TRANS_ITEM fn regular_modules::mod1[0]::foo[0] @@ regular_modules-mod1[External]
     fn foo() {}
-    //~ TRANS_ITEM fn regular_modules::mod1[0]::bar[0] @@ regular_modules-mod1[WeakODR]
+    //~ TRANS_ITEM fn regular_modules::mod1[0]::bar[0] @@ regular_modules-mod1[External]
     fn bar() {}
     //~ TRANS_ITEM static regular_modules::mod1[0]::BAZ[0] @@ regular_modules-mod1[External]
     static BAZ: u64 = 0;
 
     mod mod1 {
-        //~ TRANS_ITEM fn regular_modules::mod1[0]::mod1[0]::foo[0] @@ regular_modules-mod1-mod1[WeakODR]
+        //~ TRANS_ITEM fn regular_modules::mod1[0]::mod1[0]::foo[0] @@ regular_modules-mod1-mod1[External]
         fn foo() {}
-        //~ TRANS_ITEM fn regular_modules::mod1[0]::mod1[0]::bar[0] @@ regular_modules-mod1-mod1[WeakODR]
+        //~ TRANS_ITEM fn regular_modules::mod1[0]::mod1[0]::bar[0] @@ regular_modules-mod1-mod1[External]
         fn bar() {}
         //~ TRANS_ITEM static regular_modules::mod1[0]::mod1[0]::BAZ[0] @@ regular_modules-mod1-mod1[External]
         static BAZ: u64 = 0;
     }
 
     mod mod2 {
-        //~ TRANS_ITEM fn regular_modules::mod1[0]::mod2[0]::foo[0] @@ regular_modules-mod1-mod2[WeakODR]
+        //~ TRANS_ITEM fn regular_modules::mod1[0]::mod2[0]::foo[0] @@ regular_modules-mod1-mod2[External]
         fn foo() {}
-        //~ TRANS_ITEM fn regular_modules::mod1[0]::mod2[0]::bar[0] @@ regular_modules-mod1-mod2[WeakODR]
+        //~ TRANS_ITEM fn regular_modules::mod1[0]::mod2[0]::bar[0] @@ regular_modules-mod1-mod2[External]
         fn bar() {}
         //~ TRANS_ITEM static regular_modules::mod1[0]::mod2[0]::BAZ[0] @@ regular_modules-mod1-mod2[External]
         static BAZ: u64 = 0;
@@ -55,26 +55,26 @@ fn bar() {}
 
 mod mod2 {
 
-    //~ TRANS_ITEM fn regular_modules::mod2[0]::foo[0] @@ regular_modules-mod2[WeakODR]
+    //~ TRANS_ITEM fn regular_modules::mod2[0]::foo[0] @@ regular_modules-mod2[External]
     fn foo() {}
-    //~ TRANS_ITEM fn regular_modules::mod2[0]::bar[0] @@ regular_modules-mod2[WeakODR]
+    //~ TRANS_ITEM fn regular_modules::mod2[0]::bar[0] @@ regular_modules-mod2[External]
     fn bar() {}
     //~ TRANS_ITEM static regular_modules::mod2[0]::BAZ[0] @@ regular_modules-mod2[External]
     static BAZ: u64 = 0;
 
     mod mod1 {
-        //~ TRANS_ITEM fn regular_modules::mod2[0]::mod1[0]::foo[0] @@ regular_modules-mod2-mod1[WeakODR]
+        //~ TRANS_ITEM fn regular_modules::mod2[0]::mod1[0]::foo[0] @@ regular_modules-mod2-mod1[External]
         fn foo() {}
-        //~ TRANS_ITEM fn regular_modules::mod2[0]::mod1[0]::bar[0] @@ regular_modules-mod2-mod1[WeakODR]
+        //~ TRANS_ITEM fn regular_modules::mod2[0]::mod1[0]::bar[0] @@ regular_modules-mod2-mod1[External]
         fn bar() {}
         //~ TRANS_ITEM static regular_modules::mod2[0]::mod1[0]::BAZ[0] @@ regular_modules-mod2-mod1[External]
         static BAZ: u64 = 0;
     }
 
     mod mod2 {
-        //~ TRANS_ITEM fn regular_modules::mod2[0]::mod2[0]::foo[0] @@ regular_modules-mod2-mod2[WeakODR]
+        //~ TRANS_ITEM fn regular_modules::mod2[0]::mod2[0]::foo[0] @@ regular_modules-mod2-mod2[External]
         fn foo() {}
-        //~ TRANS_ITEM fn regular_modules::mod2[0]::mod2[0]::bar[0] @@ regular_modules-mod2-mod2[WeakODR]
+        //~ TRANS_ITEM fn regular_modules::mod2[0]::mod2[0]::bar[0] @@ regular_modules-mod2-mod2[External]
         fn bar() {}
         //~ TRANS_ITEM static regular_modules::mod2[0]::mod2[0]::BAZ[0] @@ regular_modules-mod2-mod2[External]
         static BAZ: u64 = 0;
index 9e878b95a369a043c4c2f68eeb308fa68a6fe177..ffe1ec278b8dd3e0b99075d4bf615a9acfec2b69 100644 (file)
@@ -21,7 +21,7 @@
 //~ TRANS_ITEM static statics::BAR[0] @@ statics[External]
 static BAR: u32 = 0;
 
-//~ TRANS_ITEM fn statics::function[0] @@ statics[WeakODR]
+//~ TRANS_ITEM fn statics::function[0] @@ statics[External]
 fn function() {
     //~ TRANS_ITEM static statics::function[0]::FOO[0] @@ statics[External]
     static FOO: u32 = 0;
@@ -37,7 +37,7 @@ mod mod1 {
     //~ TRANS_ITEM static statics::mod1[0]::BAR[0] @@ statics-mod1[External]
     static BAR: u32 = 0;
 
-    //~ TRANS_ITEM fn statics::mod1[0]::function[0] @@ statics-mod1[WeakODR]
+    //~ TRANS_ITEM fn statics::mod1[0]::function[0] @@ statics-mod1[External]
     fn function() {
         //~ TRANS_ITEM static statics::mod1[0]::function[0]::FOO[0] @@ statics-mod1[External]
         static FOO: u32 = 0;
index 83dd6a3b00258ec5ee4af95d2ff11877cf98cc73..25f8c130469973d888c617dca289abc639386b54 100644 (file)
@@ -31,13 +31,13 @@ pub fn droppy() {
 // that's one new drop call per call to possibly_unwinding(), and finally 3 drop calls for the
 // regular function exit. We used to have problems with quadratic growths of drop calls in such
 // functions.
-// CHECK: call{{.*}}SomeUniqueName{{.*}}drop
-// CHECK: call{{.*}}SomeUniqueName{{.*}}drop
-// CHECK: call{{.*}}SomeUniqueName{{.*}}drop
-// CHECK: call{{.*}}SomeUniqueName{{.*}}drop
-// CHECK: call{{.*}}SomeUniqueName{{.*}}drop
-// CHECK: call{{.*}}SomeUniqueName{{.*}}drop
-// CHECK-NOT: call{{.*}}SomeUniqueName{{.*}}drop
+// CHECK: call{{.*}}drop{{.*}}SomeUniqueName
+// CHECK: call{{.*}}drop{{.*}}SomeUniqueName
+// CHECK: call{{.*}}drop{{.*}}SomeUniqueName
+// CHECK: call{{.*}}drop{{.*}}SomeUniqueName
+// CHECK: call{{.*}}drop{{.*}}SomeUniqueName
+// CHECK: call{{.*}}drop{{.*}}SomeUniqueName
+// CHECK-NOT: call{{.*}}drop{{.*}}SomeUniqueName
 // The next line checks for the } that ends the function definition
 // CHECK-LABEL: {{^[}]}}
     let _s = SomeUniqueName;
index 4245786295b343fa14df160e11f5d447a65483be..3c32cb947b382f1107473fd6d81d0437c0ca927e 100644 (file)
@@ -16,6 +16,5 @@
 fn main() {
     match () {
         Trait { x: 42 } => () //~ ERROR expected variant, struct or type alias, found trait `Trait`
-        //~^ ERROR `Trait` does not name a struct or a struct variant
     }
 }
diff --git a/src/test/compile-fail/E0163.rs b/src/test/compile-fail/E0163.rs
deleted file mode 100644 (file)
index 5cb6f4d..0000000
+++ /dev/null
@@ -1,20 +0,0 @@
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-enum Foo { B(u32) }
-
-fn bar(foo: Foo) -> u32 {
-    match foo {
-        Foo::B { i } => i, //~ ERROR E0163
-    }
-}
-
-fn main() {
-}
diff --git a/src/test/compile-fail/asm-bad-clobber.rs b/src/test/compile-fail/asm-bad-clobber.rs
new file mode 100644 (file)
index 0000000..714343a
--- /dev/null
@@ -0,0 +1,26 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// ignore-android
+// ignore-arm
+// ignore-aarch64
+
+#![feature(asm, rustc_attrs)]
+
+#[cfg(any(target_arch = "x86",
+          target_arch = "x86_64"))]
+#[rustc_error]
+pub fn main() {
+    unsafe {
+        // clobber formatted as register input/output
+        asm!("xor %eax, %eax" : : : "{eax}");
+        //~^ ERROR clobber should not be surrounded by braces
+    }
+}
index 3100aba4b72be3cfa027c52d61feec6bb0f67470..1049bcd15644fcdd87d25ddec4475fb05263f94f 100644 (file)
@@ -10,6 +10,7 @@
 #![crate_name="lint_stability"]
 #![crate_type = "lib"]
 #![feature(staged_api)]
+#![feature(associated_type_defaults)]
 #![stable(feature = "lint_stability", since = "1.0.0")]
 
 #[stable(feature = "test_feature", since = "1.0.0")]
@@ -92,6 +93,15 @@ fn trait_stable(&self) {}
     fn trait_stable_text(&self) {}
 }
 
+#[stable(feature = "test_feature", since = "1.0.0")]
+pub trait TraitWithAssociatedTypes {
+    #[unstable(feature = "test_feature", issue = "0")]
+    type TypeUnstable = u8;
+    #[stable(feature = "test_feature", since = "1.0.0")]
+    #[rustc_deprecated(since = "1.0.0", reason = "text")]
+    type TypeDeprecated = u8;
+}
+
 #[stable(feature = "test_feature", since = "1.0.0")]
 impl Trait for MethodTester {}
 
index a5c740d9f638a65a94b8fbf9833102f8264bf472..74546152ca90fcf77b85c1742cd89768820c8e27 100644 (file)
@@ -31,12 +31,14 @@ fn main() {
         Empty1 => () // Not an error, `Empty1` is interpreted as a new binding
     }
     match e3 {
-        E::Empty3 => () //~ ERROR `E::Empty3` does not name a tuple variant or a tuple struct
+        E::Empty3 => ()
+        //~^ ERROR `E::Empty3` does not name a unit variant, unit struct or a constant
     }
     match xe1 {
         XEmpty1 => () // Not an error, `XEmpty1` is interpreted as a new binding
     }
     match xe3 {
-        XE::XEmpty3 => () //~ ERROR `XE::XEmpty3` does not name a tuple variant or a tuple struct
+        XE::XEmpty3 => ()
+        //~^ ERROR `XE::XEmpty3` does not name a unit variant, unit struct or a constant
     }
 }
index 4637512216c40b15d3e72110ff0dbcb4ad54e1e9..671232e701f872b33721803aba16349e5ca33ed1 100644 (file)
@@ -16,7 +16,7 @@ pub struct GslResult {
 
 impl GslResult {
     pub fn new() -> GslResult {
-        Result { //~ ERROR: `Result` does not name a structure
+        Result { //~ ERROR: `Result` does not name a struct or a struct variant
             val: 0f64,
             err: 0f64
         }
index 0fee6dc7617005341a60780c17278f4e1e8f903e..218f68714ff929821d1f9b5a975fa7ea0634afd8 100644 (file)
@@ -11,5 +11,5 @@
 mod foo {}
 
 fn main() {
-    let p = foo { x: () }; //~ ERROR `foo` does not name a structure
+    let p = foo { x: () }; //~ ERROR `foo` does not name a struct or a struct variant
 }
index db43c1cce9947fa348602ea021e5ccf204575760..2f2c252b947c90f38994c142985e6ae3fdd8cdb1 100644 (file)
@@ -15,6 +15,5 @@ enum Foo {
 fn main() {
     match Foo::Bar(1) {
         Foo { i } => () //~ ERROR expected variant, struct or type alias, found enum `Foo`
-        //~^ ERROR `Foo` does not name a struct or a struct variant
     }
 }
index 93c4f4bfcef8f78684d589600e47d84bbe290998..090b8a0d16e64ca9a890fde406512ce8d22c82bb 100644 (file)
@@ -11,5 +11,5 @@
 mod MyMod {}
 
 fn main() {
-    let myVar = MyMod { T: 0 }; //~ ERROR `MyMod` does not name a structure
+    let myVar = MyMod { T: 0 }; //~ ERROR `MyMod` does not name a struct or a struct variant
 }
diff --git a/src/test/compile-fail/issue-22933-1.rs b/src/test/compile-fail/issue-22933-1.rs
new file mode 100644 (file)
index 0000000..afb972f
--- /dev/null
@@ -0,0 +1,35 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(rustc_attrs)]
+#![allow(warnings)]
+
+struct CNFParser {
+    token: char,
+}
+
+impl CNFParser {
+    fn is_whitespace(c: char) -> bool {
+        c == ' ' || c == '\n'
+    }
+
+    fn consume_whitespace(&mut self) {
+        self.consume_while(&(CNFParser::is_whitespace))
+    }
+
+    fn consume_while(&mut self, p: &Fn(char) -> bool) {
+        while p(self.token) {
+            return
+        }
+    }
+}
+
+#[rustc_error]
+fn main() {} //~ ERROR compilation successful
diff --git a/src/test/compile-fail/issue-22933-2.rs b/src/test/compile-fail/issue-22933-2.rs
new file mode 100644 (file)
index 0000000..7d619c2
--- /dev/null
@@ -0,0 +1,21 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+enum Delicious {
+    Pie      = 0x1,
+    Apple    = 0x2,
+    ApplePie = Delicious::Apple as isize | Delicious::PIE as isize,
+    //~^ ERROR constant evaluation error: unresolved path in constant expression
+}
+
+const FOO: [u32; u8::MIN as usize] = [];
+//~^ ERROR array length constant evaluation error: unresolved path in constant expression
+
+fn main() {}
index 6cadbef33e7f0b4f9eb555b9d7425a9f8f9a2166..24b39eeff0f791c0e3508e77c6e99e82d11ec383 100644 (file)
@@ -12,6 +12,5 @@ fn main() {
     match 'a' {
         char{ch} => true
         //~^ ERROR expected variant, struct or type alias, found builtin type `char`
-        //~| ERROR `char` does not name a struct or a struct variant
     };
 }
index d2f9abd2e316b41539d235ebaaa6599f01cb5049..7a329bac61b22f64a0fa2eee6b1c512423005833 100644 (file)
 mod A {}
 
 fn main() {
-    let u = A { x: 1 }; //~ ERROR `A` does not name a structure
-    let v = u32 { x: 1 }; //~ ERROR `u32` does not name a structure
+    let u = A { x: 1 }; //~ ERROR `A` does not name a struct or a struct variant
+    let v = u32 { x: 1 }; //~ ERROR `u32` does not name a struct or a struct variant
     match () {
         A { x: 1 } => {} //~ ERROR expected variant, struct or type alias, found module `A`
-        //~^ ERROR `A` does not name a struct or a struct variant
         u32 { x: 1 } => {} //~ ERROR expected variant, struct or type alias, found builtin type `u32
-        //~^ ERROR `u32` does not name a struct or a struct variant
     }
 }
index d014c45ad2d1764e6e23c16e476a37909dff4ea9..e20e6ea23198cd3c885fa4bea2d8d90ec4a94567 100644 (file)
@@ -18,7 +18,7 @@ enum Enum {
 
 fn main() {
     let x = Foo(1);
-    Foo { ..x }; //~ ERROR `Foo` does not name a structure
+    Foo { ..x }; //~ ERROR `Foo` does not name a struct or a struct variant
     let Foo { .. } = x; //~ ERROR `Foo` does not name a struct
 
     let x = Bar;
index 8d74154655fcee4b9006a78b57786493f18aceef..576451f7292a894c1b8606e94401e5893849723b 100644 (file)
@@ -18,7 +18,7 @@ enum Foo {
 fn main() {
     match Foo::Baz {
         Foo::Bar => {}
-        //~^ ERROR `Foo::Bar` does not name a tuple variant or a tuple struct
+        //~^ ERROR `Foo::Bar` does not name a unit variant, unit struct or a constant
         _ => {}
     }
 
diff --git a/src/test/compile-fail/issue-34209.rs b/src/test/compile-fail/issue-34209.rs
new file mode 100644 (file)
index 0000000..6fae18d
--- /dev/null
@@ -0,0 +1,22 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+enum S {
+    A,
+}
+
+fn bug(l: S) {
+    match l {
+        S::B{ } => { },
+        //~^ ERROR ambiguous associated type; specify the type using the syntax `<S as Trait>::B`
+    }
+}
+
+fn main () {}
index 843ff38df49cb6527e53e5b6af644f1b6e6bb444..55983c672aa08a8553ce28932d2db5202f277b63 100644 (file)
@@ -11,5 +11,5 @@
 struct NonCopyable(());
 
 fn main() {
-    let z = NonCopyable{ p: () }; //~ ERROR `NonCopyable` does not name a structure
+    let z = NonCopyable{ p: () }; //~ ERROR `NonCopyable` does not name a struct or a struct variant
 }
index dbcd3f32f3b66f8af89f7029303d7344ce3147d8..505a91f223cc6ceb99afb37edf5540b923105301 100644 (file)
@@ -10,7 +10,7 @@
 
 struct T { i: i32 }
 fn f<T>() {
-    let t = T { i: 0 }; //~ ERROR `T` does not name a structure
+    let t = T { i: 0 }; //~ ERROR `T` does not name a struct or a struct variant
 }
 
 mod Foo {
index 414d2a857acc72b0f813ef977a9ffbe8166a1374..953cd4a2ff5eac744efb08592f9c7b13f58da167 100644 (file)
@@ -128,6 +128,11 @@ fn test() {
         <Foo>::trait_stable_text(&foo);
         <Foo as Trait>::trait_stable_text(&foo);
 
+        struct S1<T: TraitWithAssociatedTypes>(T::TypeUnstable);
+        //~^ ERROR use of unstable library feature
+        struct S2<T: TraitWithAssociatedTypes>(T::TypeDeprecated);
+        //~^ ERROR use of deprecated item
+
         let _ = DeprecatedStruct { //~ ERROR use of deprecated item
             i: 0 //~ ERROR use of deprecated item
         };
index faf6d255c9afc38c2b26ba93b5ea0105fbbcb59b..ef011c89c622ba184eb6e986834946681cb8165e 100644 (file)
@@ -22,12 +22,13 @@ impl MyTrait for Foo {}
 
 fn main() {
     match 0u32 {
-        Foo::bar => {} //~ ERROR E0327
+        Foo::bar => {} //~ ERROR `Foo::bar` does not name a unit variant, unit struct or a constant
     }
     match 0u32 {
-        <Foo>::bar => {} //~ ERROR E0327
+        <Foo>::bar => {} //~ ERROR `bar` does not name a unit variant, unit struct or a constant
     }
     match 0u32 {
-        <Foo>::trait_bar => {} //~ ERROR E0327
+        <Foo>::trait_bar => {}
+        //~^ ERROR `trait_bar` does not name a unit variant, unit struct or a constant
     }
 }
index 86873022f0ff10b666941c3cd86cdefcac106ffa..9034e24a6fee03fab57aa7f722658c8a72b806f5 100644 (file)
@@ -27,7 +27,8 @@ fn f<T>() {}
 
 fn main() {
     match 10 {
-        <S as Tr>::A::f::<u8> => {} //~ ERROR associated items in match patterns must be constants
+        <S as Tr>::A::f::<u8> => {}
+        //~^ ERROR `Tr::A::f<u8>` does not name a unit variant, unit struct or a constant
         0 ... <S as Tr>::A::f::<u8> => {} //~ ERROR only char and numeric types are allowed in range
     }
 }
diff --git a/src/test/compile-fail/struct-pat-associated-path.rs b/src/test/compile-fail/struct-pat-associated-path.rs
new file mode 100644 (file)
index 0000000..d3f840f
--- /dev/null
@@ -0,0 +1,37 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+struct S;
+
+trait Tr {
+    type A;
+}
+
+impl Tr for S {
+    type A = S;
+}
+
+fn f<T: Tr>() {
+    match S {
+        T::A {} => {} //~ ERROR `T::A` does not name a struct or a struct variant
+    }
+}
+
+fn g<T: Tr<A = S>>() {
+    match S {
+        T::A {} => {} //~ ERROR `T::A` does not name a struct or a struct variant
+    }
+}
+
+fn main() {
+    match S {
+        S::A {} => {} //~ ERROR ambiguous associated type
+    }
+}
index 67ccd6b7cd058dd787c75c09230f58a45b72359c..13fdaa302f70a72ee0a800f21b42a93de4c9050d 100644 (file)
@@ -12,5 +12,5 @@ trait TraitNotAStruct {}
 
 fn main() {
     TraitNotAStruct{ value: 0 };
-    //~^ ERROR: `TraitNotAStruct` does not name a structure [E0071]
+    //~^ ERROR: `TraitNotAStruct` does not name a struct or a struct variant [E0071]
 }
index 11d81eda55625960ae69fcca3483de8072e7de35..3f50811f826e07e83a37bd50d8b0fbfac40fc665 100644 (file)
@@ -72,6 +72,7 @@ fn expand_into_foo_multi(cx: &mut ExtCtxt,
                 ..(*quote_item!(cx, enum Foo2 { Bar2, Baz2 }).unwrap()).clone()
             })),
             Annotatable::Item(quote_item!(cx, enum Foo3 { Bar }).unwrap()),
+            Annotatable::Item(quote_item!(cx, #[cfg(any())] fn foo2() {}).unwrap()),
         ],
         Annotatable::ImplItem(it) => vec![
             quote_item!(cx, impl X { fn foo(&self) -> i32 { 42 } }).unwrap().and_then(|i| {
index d17adff007c6335b440c4c42063a753f99f2c91c..fe2317aabea68b43f60c53e0c1e81932b562fdf6 100644 (file)
@@ -21,6 +21,9 @@
 #[derive(PartialEq, Clone, Debug)]
 fn foo() -> AnotherFakeTypeThatHadBetterGoAway {}
 
+// Check that the `#[into_multi_foo]`-generated `foo2` is configured away
+fn foo2() {}
+
 trait Qux {
     #[into_multi_foo]
     fn bar();
diff --git a/src/test/run-pass/auxiliary/xcrate_generic_fn_nested_return.rs b/src/test/run-pass/auxiliary/xcrate_generic_fn_nested_return.rs
new file mode 100644 (file)
index 0000000..48fb05f
--- /dev/null
@@ -0,0 +1,26 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+pub struct Request {
+    pub id: String,
+    pub arg: String,
+}
+
+pub fn decode<T>() -> Result<Request, ()> {
+    (|| {
+        Ok(Request {
+            id: "hi".to_owned(),
+            arg: match Err(()) {
+                Ok(v) => v,
+                Err(e) => return Err(e)
+            },
+        })
+    })()
+}
diff --git a/src/test/run-pass/issue-27021.rs b/src/test/run-pass/issue-27021.rs
new file mode 100644 (file)
index 0000000..eb7d529
--- /dev/null
@@ -0,0 +1,21 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn main() {
+    let mut c = (1, (1, "".to_owned()));
+    match c {
+        c2 => { (c.1).0 = 2; assert_eq!((c2.1).0, 1); }
+    }
+
+    let mut c = (1, (1, (1, "".to_owned())));
+    match c.1 {
+        c2 => { ((c.1).1).0 = 3; assert_eq!((c2.1).0, 1); }
+    }
+}
diff --git a/src/test/run-pass/issue34569.rs b/src/test/run-pass/issue34569.rs
new file mode 100644 (file)
index 0000000..41d02e9
--- /dev/null
@@ -0,0 +1,26 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// compile-flags:-g
+
+// In this test we just want to make sure that the code below does not lead to
+// a debuginfo verification assertion during compilation. This was caused by the
+// closure in the guard being translated twice due to how match expressions are
+// handled.
+//
+// See https://github.com/rust-lang/rust/issues/34569 for details.
+
+fn main() {
+    match 0 {
+        e if (|| { e == 0 })() => {},
+        1 => {},
+        _ => {}
+    }
+}
index 2371909b31b779f96d46f57240526edc607775a5..ca3294a87adbb1a0ffbbe2c88d16d536ddf30620 100644 (file)
@@ -171,6 +171,13 @@ fn test_fn_ignored_pair_named() -> (Foo, Foo) {
     id(ignored_pair_named())
 }
 
+#[rustc_mir]
+fn test_fn_nested_pair(x: &((f32, f32), u32)) -> (f32, f32) {
+    let y = *x;
+    let z = y.0;
+    (z.0, z.1)
+}
+
 fn main() {
     assert_eq!(test1(1, (2, 3), &[4, 5, 6]), (1, (2, 3), &[4, 5, 6][..]));
     assert_eq!(test2(98), 98);
@@ -196,4 +203,5 @@ fn main() {
 
     assert_eq!(test_fn_ignored_pair_0(), ());
     assert_eq!(test_fn_ignored_pair_named(), (Foo, Foo));
+    assert_eq!(test_fn_nested_pair(&((1.0, 2.0), 0)), (1.0, 2.0));
 }
diff --git a/src/test/run-pass/xcrate_generic_fn_nested_return.rs b/src/test/run-pass/xcrate_generic_fn_nested_return.rs
new file mode 100644 (file)
index 0000000..181c916
--- /dev/null
@@ -0,0 +1,17 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// aux-build:xcrate_generic_fn_nested_return.rs
+
+extern crate xcrate_generic_fn_nested_return as test;
+
+pub fn main() {
+    assert!(test::decode::<()>().is_err());
+}