git.lizzy.rs Git - rust.git/commitdiff
Rollup merge of #34700 - inejge:ai-hints, r=alexcrichton
author Manish Goregaokar <manishsmail@gmail.com>
Fri, 8 Jul 2016 07:44:21 +0000 (13:14 +0530)
committer Manish Goregaokar <manishsmail@gmail.com>
Fri, 8 Jul 2016 09:17:00 +0000 (14:47 +0530)
Use hints with getaddrinfo() in std::net::lookup_host()

As noted in #24250, `std::net::lookup_host()` repeats each IPv[46] address in the result set. The number of repetitions is OS-dependent; e.g., Linux and FreeBSD give three copies, OpenBSD gives two. Filtering the duplicates can be done by the user if `lookup_host()` is used explicitly, but not with functions like `TcpStream::connect()`. What happens with the latter is that any unsuccessful connection attempt will be repeated as many times as there are duplicates of the address.

The program:

```rust
use std::net::TcpStream;

fn main() {
    let _stream = TcpStream::connect("localhost:4444").unwrap();
}
```

results in the following capture:

[capture-before.txt](https://github.com/rust-lang/rust/files/352004/capture-before.txt)

assuming that "localhost" resolves both to ::1 and 127.0.0.1, and that the listening program opens just an IPv4 socket (e.g., `nc -l 127.0.0.1 4444`). The reason for this behavior is explained in [this comment](https://github.com/rust-lang/rust/issues/24250#issuecomment-92240152): `getaddrinfo()` is not constrained.

Various OSS projects (I checked out Postfix, OpenLDAP, Apache HTTPD and BIND) which use `getaddrinfo()` generally constrain the result set by using a non-NULL `hints` parameter and setting at least `ai_socktype` to `SOCK_STREAM`. `SOCK_DGRAM` would also work. Other parameters are unnecessary for pure name resolution.

The patch in this PR initializes a `hints` struct and passes it to `getaddrinfo()`, which eliminates the duplicates. The same test program as above with this change produces:

[capture-after.txt](https://github.com/rust-lang/rust/files/352042/capture-after.txt)

All `libstd` tests pass with this patch.

86 files changed:
README.md
RELEASES.md
mk/main.mk
src/bootstrap/Cargo.toml
src/bootstrap/bin/main.rs [new file with mode: 0644]
src/bootstrap/bin/rustc.rs [new file with mode: 0644]
src/bootstrap/bin/rustdoc.rs [new file with mode: 0644]
src/bootstrap/bootstrap.py
src/bootstrap/build/cc.rs [deleted file]
src/bootstrap/build/channel.rs [deleted file]
src/bootstrap/build/check.rs [deleted file]
src/bootstrap/build/clean.rs [deleted file]
src/bootstrap/build/compile.rs [deleted file]
src/bootstrap/build/config.rs [deleted file]
src/bootstrap/build/dist.rs [deleted file]
src/bootstrap/build/doc.rs [deleted file]
src/bootstrap/build/flags.rs [deleted file]
src/bootstrap/build/job.rs [deleted file]
src/bootstrap/build/mod.rs [deleted file]
src/bootstrap/build/native.rs [deleted file]
src/bootstrap/build/sanity.rs [deleted file]
src/bootstrap/build/step.rs [deleted file]
src/bootstrap/build/util.rs [deleted file]
src/bootstrap/cc.rs [new file with mode: 0644]
src/bootstrap/channel.rs [new file with mode: 0644]
src/bootstrap/check.rs [new file with mode: 0644]
src/bootstrap/clean.rs [new file with mode: 0644]
src/bootstrap/compile.rs [new file with mode: 0644]
src/bootstrap/config.rs [new file with mode: 0644]
src/bootstrap/dist.rs [new file with mode: 0644]
src/bootstrap/doc.rs [new file with mode: 0644]
src/bootstrap/flags.rs [new file with mode: 0644]
src/bootstrap/job.rs [new file with mode: 0644]
src/bootstrap/lib.rs
src/bootstrap/main.rs [deleted file]
src/bootstrap/native.rs [new file with mode: 0644]
src/bootstrap/rustc.rs [deleted file]
src/bootstrap/rustdoc.rs [deleted file]
src/bootstrap/sanity.rs [new file with mode: 0644]
src/bootstrap/step.rs [new file with mode: 0644]
src/bootstrap/util.rs [new file with mode: 0644]
src/doc/book/closures.md
src/doc/book/conditional-compilation.md
src/doc/book/documentation.md
src/doc/book/getting-started.md
src/doc/book/guessing-game.md
src/doc/book/inline-assembly.md
src/doc/book/loops.md
src/doc/book/mutability.md
src/doc/book/structs.md
src/doc/book/testing.md
src/etc/gdb_rust_pretty_printing.py
src/liballoc/rc.rs
src/libcore/iter/mod.rs
src/libcore/iter/traits.rs
src/libcore/num/f32.rs
src/libcore/num/f64.rs
src/libcore/num/int_macros.rs
src/libcore/num/mod.rs
src/libcore/num/uint_macros.rs
src/librustc/hir/mod.rs
src/librustc_resolve/lib.rs
src/librustc_save_analysis/dump_visitor.rs
src/librustc_trans/_match.rs
src/librustc_trans/base.rs
src/librustdoc/html/highlight.rs
src/librustdoc/html/render.rs
src/librustdoc/html/static/rustdoc.css
src/libstd/io/error.rs
src/libstd/memchr.rs
src/libstd/path.rs
src/libsyntax/ext/expand.rs
src/libsyntax/ext/quote.rs
src/libsyntax/ext/tt/macro_rules.rs
src/libsyntax/ext/tt/transcribe.rs
src/libsyntax/fold.rs
src/libsyntax/parse/mod.rs
src/libsyntax/parse/parser.rs
src/libsyntax/tokenstream.rs
src/libsyntax_ext/asm.rs
src/test/compile-fail/asm-bad-clobber.rs [new file with mode: 0644]
src/test/run-pass-fulldeps/auxiliary/macro_crate_test.rs
src/test/run-pass-fulldeps/macro-crate.rs
src/test/run-pass/auxiliary/xcrate_generic_fn_nested_return.rs [new file with mode: 0644]
src/test/run-pass/issue-27021.rs [new file with mode: 0644]
src/test/run-pass/xcrate_generic_fn_nested_return.rs [new file with mode: 0644]

index 49236d6b671ef74cb8f4bb8f90d35171d243dc88..cdf5e735adf070965e4c10a33df337e5c429347c 100644 (file)
--- a/README.md
+++ b/README.md
@@ -66,7 +66,7 @@ build.
 
 [MSYS2][msys2] can be used to easily build Rust on Windows:
 
-msys2: https://msys2.github.io/
+[msys2]: https://msys2.github.io/
 
 1. Grab the latest [MSYS2 installer][msys2] and go through the installer.
 
index ffe8d64ff2da4c71b64338b88b460a5946f63b05..c798c56cd6d03f810d74976a66d2ab53b116c11c 100644 (file)
@@ -172,7 +172,7 @@ Libraries
   (https://github.com/rust-lang/rust/pull/33050).
 * [Implement `Display` and `Hash` for `std::num::Wrapping`]
   (https://github.com/rust-lang/rust/pull/33023).
-* [Add `Default` implementation for `&CStr`, `CString`, `Path`]
+* [Add `Default` implementation for `&CStr`, `CString`]
   (https://github.com/rust-lang/rust/pull/32990).
 * [Implement `From<Vec<T>>` and `Into<Vec<T>>` for `VecDeque<T>`]
   (https://github.com/rust-lang/rust/pull/32866).
index daf656f89c1a5a79bf621baf11b6bc972ae2a193..4c72597f0c5c1d2a2192a24f9d2337a36ae46be9 100644 (file)
@@ -13,7 +13,7 @@
 ######################################################################
 
 # The version number
-CFG_RELEASE_NUM=1.11.0
+CFG_RELEASE_NUM=1.12.0
 
 # An optional number to put after the label, e.g. '.2' -> '-beta.2'
 # NB Make sure it starts with a dot to conform to semver pre-release
index f9a64567ffde0e241a5f8c4d90aaae06e5f5b673..cde4a825be1fb81259d354b3cd266bfc47ab84d9 100644 (file)
@@ -9,15 +9,15 @@ path = "lib.rs"
 
 [[bin]]
 name = "bootstrap"
-path = "main.rs"
+path = "bin/main.rs"
 
 [[bin]]
 name = "rustc"
-path = "rustc.rs"
+path = "bin/rustc.rs"
 
 [[bin]]
 name = "rustdoc"
-path = "rustdoc.rs"
+path = "bin/rustdoc.rs"
 
 [dependencies]
 build_helper = { path = "../build_helper" }
diff --git a/src/bootstrap/bin/main.rs b/src/bootstrap/bin/main.rs
new file mode 100644 (file)
index 0000000..c47f4fd
--- /dev/null
@@ -0,0 +1,37 @@
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! rustbuild, the Rust build system
+//!
+//! This is the entry point for the build system used to compile the `rustc`
+//! compiler. Lots of documentation can be found in the `README.md` file next to
+//! this file, and otherwise documentation can be found throughout the `build`
+//! directory in each respective module.
+
+#![deny(warnings)]
+
+extern crate bootstrap;
+
+use std::env;
+
+use bootstrap::{Flags, Config, Build};
+
+fn main() {
+    let args = env::args().skip(1).collect::<Vec<_>>();
+    let flags = Flags::parse(&args);
+    let mut config = Config::parse(&flags.build, flags.config.clone());
+
+    // compat with `./configure` while we're still using that
+    if std::fs::metadata("config.mk").is_ok() {
+        config.update_with_config_mk();
+    }
+
+    Build::new(flags, config).build();
+}
diff --git a/src/bootstrap/bin/rustc.rs b/src/bootstrap/bin/rustc.rs
new file mode 100644 (file)
index 0000000..c64cbb9
--- /dev/null
@@ -0,0 +1,165 @@
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Shim which is passed to Cargo as "rustc" when running the bootstrap.
+//!
+//! This shim will take care of some various tasks that our build process
+//! requires that Cargo can't quite do through normal configuration:
+//!
+//! 1. When compiling build scripts and build dependencies, we need a guaranteed
+//!    full standard library available. The only compiler which actually has
+//!    this is the snapshot, so we detect this situation and always compile with
+//!    the snapshot compiler.
+//! 2. We pass a bunch of `--cfg` and other flags based on what we're compiling
+//!    (and this slightly differs based on a whether we're using a snapshot or
+//!    not), so we do that all here.
+//!
+//! This may one day be replaced by RUSTFLAGS, but the dynamic nature of
+//! switching compilers for the bootstrap and for build scripts will probably
+//! never get replaced.
+
+extern crate bootstrap;
+
+use std::env;
+use std::ffi::OsString;
+use std::path::PathBuf;
+use std::process::Command;
+
+fn main() {
+    let args = env::args_os().skip(1).collect::<Vec<_>>();
+    // Detect whether or not we're a build script depending on whether --target
+    // is passed (a bit janky...)
+    let target = args.windows(2).find(|w| &*w[0] == "--target")
+                                .and_then(|w| w[1].to_str());
+
+    // Build scripts always use the snapshot compiler which is guaranteed to be
+    // able to produce an executable, whereas intermediate compilers may not
+    // have the standard library built yet and may not be able to produce an
+    // executable. Otherwise we just use the standard compiler we're
+    // bootstrapping with.
+    let (rustc, libdir) = if target.is_none() {
+        ("RUSTC_SNAPSHOT", "RUSTC_SNAPSHOT_LIBDIR")
+    } else {
+        ("RUSTC_REAL", "RUSTC_LIBDIR")
+    };
+    let stage = env::var("RUSTC_STAGE").unwrap();
+
+    let rustc = env::var_os(rustc).unwrap();
+    let libdir = env::var_os(libdir).unwrap();
+    let mut dylib_path = bootstrap::util::dylib_path();
+    dylib_path.insert(0, PathBuf::from(libdir));
+
+    let mut cmd = Command::new(rustc);
+    cmd.args(&args)
+       .arg("--cfg").arg(format!("stage{}", stage))
+       .env(bootstrap::util::dylib_path_var(),
+            env::join_paths(&dylib_path).unwrap());
+
+    if let Some(target) = target {
+        // The stage0 compiler has a special sysroot distinct from what we
+        // actually downloaded, so we just always pass the `--sysroot` option.
+        cmd.arg("--sysroot").arg(env::var_os("RUSTC_SYSROOT").unwrap());
+
+        // When we build Rust dylibs they're all intended for intermediate
+        // usage, so make sure we pass the -Cprefer-dynamic flag instead of
+        // linking all deps statically into the dylib.
+        cmd.arg("-Cprefer-dynamic");
+
+        // Help the libc crate compile by assisting it in finding the MUSL
+        // native libraries.
+        if let Some(s) = env::var_os("MUSL_ROOT") {
+            let mut root = OsString::from("native=");
+            root.push(&s);
+            root.push("/lib");
+            cmd.arg("-L").arg(&root);
+        }
+
+        // Pass down extra flags, commonly used to configure `-Clinker` when
+        // cross compiling.
+        if let Ok(s) = env::var("RUSTC_FLAGS") {
+            cmd.args(&s.split(" ").filter(|s| !s.is_empty()).collect::<Vec<_>>());
+        }
+
+        // If we're compiling specifically the `panic_abort` crate then we pass
+        // the `-C panic=abort` option. Note that we do not do this for any
+        // other crate intentionally as this is the only crate for now that we
+        // ship with panic=abort.
+        //
+        // This... is a bit of a hack how we detect this. Ideally this
+        // information should be encoded in the crate I guess? Would likely
+        // require an RFC amendment to RFC 1513, however.
+        let is_panic_abort = args.windows(2).any(|a| {
+            &*a[0] == "--crate-name" && &*a[1] == "panic_abort"
+        });
+        // FIXME(stage0): remove this `stage != "0"` condition
+        if is_panic_abort && stage != "0" {
+            cmd.arg("-C").arg("panic=abort");
+        }
+
+        // Set various options from config.toml to configure how we're building
+        // code.
+        if env::var("RUSTC_DEBUGINFO") == Ok("true".to_string()) {
+            cmd.arg("-g");
+        }
+        let debug_assertions = match env::var("RUSTC_DEBUG_ASSERTIONS") {
+            Ok(s) => if s == "true" {"y"} else {"n"},
+            Err(..) => "n",
+        };
+        cmd.arg("-C").arg(format!("debug-assertions={}", debug_assertions));
+        if let Ok(s) = env::var("RUSTC_CODEGEN_UNITS") {
+            cmd.arg("-C").arg(format!("codegen-units={}", s));
+        }
+
+        // Dealing with rpath here is a little special, so let's go into some
+        // detail. First off, `-rpath` is a linker option on Unix platforms
+        // which adds to the runtime dynamic loader path when looking for
+        // dynamic libraries. We use this by default on Unix platforms to ensure
+        // that our nightlies behave the same on Windows, that is they work out
+        // of the box. This can be disabled, of course, but basically that's why
+        // we're gated on RUSTC_RPATH here.
+        //
+        // Ok, so the astute might be wondering "why isn't `-C rpath` used
+        // here?" and that is indeed a good question to task. This codegen
+        // option is the compiler's current interface to generating an rpath.
+        // Unfortunately it doesn't quite suffice for us. The flag currently
+        // takes no value as an argument, so the compiler calculates what it
+        // should pass to the linker as `-rpath`. This unfortunately is based on
+        // the **compile time** directory structure which when building with
+        // Cargo will be very different than the runtime directory structure.
+        //
+        // All that's a really long winded way of saying that if we use
+        // `-Crpath` then the executables generated have the wrong rpath of
+        // something like `$ORIGIN/deps` when in fact the way we distribute
+        // rustc requires the rpath to be `$ORIGIN/../lib`.
+        //
+        // So, all in all, to set up the correct rpath we pass the linker
+        // argument manually via `-C link-args=-Wl,-rpath,...`. Plus isn't it
+        // fun to pass a flag to a tool to pass a flag to pass a flag to a tool
+        // to change a flag in a binary?
+        if env::var("RUSTC_RPATH") == Ok("true".to_string()) {
+            let rpath = if target.contains("apple") {
+                Some("-Wl,-rpath,@loader_path/../lib")
+            } else if !target.contains("windows") {
+                Some("-Wl,-rpath,$ORIGIN/../lib")
+            } else {
+                None
+            };
+            if let Some(rpath) = rpath {
+                cmd.arg("-C").arg(format!("link-args={}", rpath));
+            }
+        }
+    }
+
+    // Actually run the compiler!
+    std::process::exit(match cmd.status() {
+        Ok(s) => s.code().unwrap_or(1),
+        Err(e) => panic!("\n\nfailed to run {:?}: {}\n\n", cmd, e),
+    })
+}
diff --git a/src/bootstrap/bin/rustdoc.rs b/src/bootstrap/bin/rustdoc.rs
new file mode 100644 (file)
index 0000000..79629bf
--- /dev/null
@@ -0,0 +1,40 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Shim which is passed to Cargo as "rustdoc" when running the bootstrap.
+//!
+//! See comments in `src/bootstrap/rustc.rs` for more information.
+
+extern crate bootstrap;
+
+use std::env;
+use std::process::Command;
+use std::path::PathBuf;
+
+fn main() {
+    let args = env::args_os().skip(1).collect::<Vec<_>>();
+    let rustdoc = env::var_os("RUSTDOC_REAL").unwrap();
+    let libdir = env::var_os("RUSTC_LIBDIR").unwrap();
+
+    let mut dylib_path = bootstrap::util::dylib_path();
+    dylib_path.insert(0, PathBuf::from(libdir));
+
+    let mut cmd = Command::new(rustdoc);
+    cmd.args(&args)
+       .arg("--cfg").arg(format!("stage{}", env::var("RUSTC_STAGE").unwrap()))
+       .arg("--cfg").arg("dox")
+       .env(bootstrap::util::dylib_path_var(),
+            env::join_paths(&dylib_path).unwrap());
+    std::process::exit(match cmd.status() {
+        Ok(s) => s.code().unwrap_or(1),
+        Err(e) => panic!("\n\nfailed to run {:?}: {}\n\n", cmd, e),
+    })
+}
+
index 832911beb588c762d6ac329e04beda0eb2217d7e..17a7c9ca66a2664488d4187d4af588b51a847a36 100644 (file)
@@ -359,7 +359,7 @@ def main():
     parser.add_argument('--clean', action='store_true')
     parser.add_argument('-v', '--verbose', action='store_true')
 
-    args = [a for a in sys.argv if a != '-h']
+    args = [a for a in sys.argv if a != '-h' and a != '--help']
     args, _ = parser.parse_known_args(args)
 
     # Configure initial bootstrap
diff --git a/src/bootstrap/build/cc.rs b/src/bootstrap/build/cc.rs
deleted file mode 100644 (file)
index ff0941a..0000000
+++ /dev/null
@@ -1,124 +0,0 @@
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! C-compiler probing and detection.
-//!
-//! This module will fill out the `cc` and `cxx` maps of `Build` by looking for
-//! C and C++ compilers for each target configured. A compiler is found through
-//! a number of vectors (in order of precedence)
-//!
-//! 1. Configuration via `target.$target.cc` in `config.toml`.
-//! 2. Configuration via `target.$target.android-ndk` in `config.toml`, if
-//!    applicable
-//! 3. Special logic to probe on OpenBSD
-//! 4. The `CC_$target` environment variable.
-//! 5. The `CC` environment variable.
-//! 6. "cc"
-//!
-//! Some of this logic is implemented here, but much of it is farmed out to the
-//! `gcc` crate itself, so we end up having the same fallbacks as there.
-//! Similar logic is then used to find a C++ compiler, just some s/cc/c++/ is
-//! used.
-//!
-//! It is intended that after this module has run no C/C++ compiler will
-//! ever be probed for. Instead the compilers found here will be used for
-//! everything.
-
-use std::process::Command;
-
-use build_helper::{cc2ar, output};
-use gcc;
-
-use build::Build;
-use build::config::Target;
-
-pub fn find(build: &mut Build) {
-    // For all targets we're going to need a C compiler for building some shims
-    // and such as well as for being a linker for Rust code.
-    for target in build.config.target.iter() {
-        let mut cfg = gcc::Config::new();
-        cfg.cargo_metadata(false).opt_level(0).debug(false)
-           .target(target).host(&build.config.build);
-
-        let config = build.config.target_config.get(target);
-        if let Some(cc) = config.and_then(|c| c.cc.as_ref()) {
-            cfg.compiler(cc);
-        } else {
-            set_compiler(&mut cfg, "gcc", target, config);
-        }
-
-        let compiler = cfg.get_compiler();
-        let ar = cc2ar(compiler.path(), target);
-        build.verbose(&format!("CC_{} = {:?}", target, compiler.path()));
-        if let Some(ref ar) = ar {
-            build.verbose(&format!("AR_{} = {:?}", target, ar));
-        }
-        build.cc.insert(target.to_string(), (compiler, ar));
-    }
-
-    // For all host triples we need to find a C++ compiler as well
-    for host in build.config.host.iter() {
-        let mut cfg = gcc::Config::new();
-        cfg.cargo_metadata(false).opt_level(0).debug(false).cpp(true)
-           .target(host).host(&build.config.build);
-        let config = build.config.target_config.get(host);
-        if let Some(cxx) = config.and_then(|c| c.cxx.as_ref()) {
-            cfg.compiler(cxx);
-        } else {
-            set_compiler(&mut cfg, "g++", host, config);
-        }
-        let compiler = cfg.get_compiler();
-        build.verbose(&format!("CXX_{} = {:?}", host, compiler.path()));
-        build.cxx.insert(host.to_string(), compiler);
-    }
-}
-
-fn set_compiler(cfg: &mut gcc::Config,
-                gnu_compiler: &str,
-                target: &str,
-                config: Option<&Target>) {
-    match target {
-        // When compiling for android we may have the NDK configured in the
-        // config.toml in which case we look there. Otherwise the default
-        // compiler already takes into account the triple in question.
-        t if t.contains("android") => {
-            if let Some(ndk) = config.and_then(|c| c.ndk.as_ref()) {
-                let target = target.replace("armv7", "arm");
-                let compiler = format!("{}-{}", target, gnu_compiler);
-                cfg.compiler(ndk.join("bin").join(compiler));
-            }
-        }
-
-        // The default gcc version from OpenBSD may be too old, try using egcc,
-        // which is a gcc version from ports, if this is the case.
-        t if t.contains("openbsd") => {
-            let c = cfg.get_compiler();
-            if !c.path().ends_with(gnu_compiler) {
-                return
-            }
-
-            let output = output(c.to_command().arg("--version"));
-            let i = match output.find(" 4.") {
-                Some(i) => i,
-                None => return,
-            };
-            match output[i + 3..].chars().next().unwrap() {
-                '0' ... '6' => {}
-                _ => return,
-            }
-            let alternative = format!("e{}", gnu_compiler);
-            if Command::new(&alternative).output().is_ok() {
-                cfg.compiler(alternative);
-            }
-        }
-
-        _ => {}
-    }
-}
diff --git a/src/bootstrap/build/channel.rs b/src/bootstrap/build/channel.rs
deleted file mode 100644 (file)
index 76d061e..0000000
+++ /dev/null
@@ -1,110 +0,0 @@
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Build configuration for Rust's release channels.
-//!
-//! Implements the stable/beta/nightly channel distinctions by setting various
-//! flags like the `unstable_features`, calculating variables like `release` and
-//! `package_vers`, and otherwise indicating to the compiler what it should
-//! print out as part of its version information.
-
-use std::fs::{self, File};
-use std::io::prelude::*;
-use std::process::Command;
-
-use build_helper::output;
-use md5;
-
-use build::Build;
-
-pub fn collect(build: &mut Build) {
-    // Currently the canonical source for the release number (e.g. 1.10.0) and
-    // the prerelease version (e.g. `.1`) is in `mk/main.mk`. We "parse" that
-    // here to learn about those numbers.
-    let mut main_mk = String::new();
-    t!(t!(File::open(build.src.join("mk/main.mk"))).read_to_string(&mut main_mk));
-    let mut release_num = "";
-    let mut prerelease_version = "";
-    for line in main_mk.lines() {
-        if line.starts_with("CFG_RELEASE_NUM") {
-            release_num = line.split('=').skip(1).next().unwrap().trim();
-        }
-        if line.starts_with("CFG_PRERELEASE_VERSION") {
-            prerelease_version = line.split('=').skip(1).next().unwrap().trim();
-        }
-    }
-
-    // Depending on the channel, passed in `./configure --release-channel`,
-    // determine various properties of the build.
-    match &build.config.channel[..] {
-        "stable" => {
-            build.release = release_num.to_string();
-            build.package_vers = build.release.clone();
-            build.unstable_features = false;
-        }
-        "beta" => {
-            build.release = format!("{}-beta{}", release_num,
-                                   prerelease_version);
-            build.package_vers = "beta".to_string();
-            build.unstable_features = false;
-        }
-        "nightly" => {
-            build.release = format!("{}-nightly", release_num);
-            build.package_vers = "nightly".to_string();
-            build.unstable_features = true;
-        }
-        _ => {
-            build.release = format!("{}-dev", release_num);
-            build.package_vers = build.release.clone();
-            build.unstable_features = true;
-        }
-    }
-    build.version = build.release.clone();
-
-    // If we have a git directory, add in some various SHA information of what
-    // commit this compiler was compiled from.
-    if fs::metadata(build.src.join(".git")).is_ok() {
-        let ver_date = output(Command::new("git").current_dir(&build.src)
-                                      .arg("log").arg("-1")
-                                      .arg("--date=short")
-                                      .arg("--pretty=format:%cd"));
-        let ver_hash = output(Command::new("git").current_dir(&build.src)
-                                      .arg("rev-parse").arg("HEAD"));
-        let short_ver_hash = output(Command::new("git")
-                                            .current_dir(&build.src)
-                                            .arg("rev-parse")
-                                            .arg("--short=9")
-                                            .arg("HEAD"));
-        let ver_date = ver_date.trim().to_string();
-        let ver_hash = ver_hash.trim().to_string();
-        let short_ver_hash = short_ver_hash.trim().to_string();
-        build.version.push_str(&format!(" ({} {})", short_ver_hash,
-                                       ver_date));
-        build.ver_date = Some(ver_date.to_string());
-        build.ver_hash = Some(ver_hash);
-        build.short_ver_hash = Some(short_ver_hash);
-    }
-
-    // Calculate this compiler's bootstrap key, which is currently defined as
-    // the first 8 characters of the md5 of the release string.
-    let key = md5::compute(build.release.as_bytes());
-    build.bootstrap_key = format!("{:02x}{:02x}{:02x}{:02x}",
-                                  key[0], key[1], key[2], key[3]);
-
-    // Slurp up the stage0 bootstrap key as we're bootstrapping from an
-    // otherwise stable compiler.
-    let mut s = String::new();
-    t!(t!(File::open(build.src.join("src/stage0.txt"))).read_to_string(&mut s));
-    if let Some(line) = s.lines().find(|l| l.starts_with("rustc_key")) {
-        if let Some(key) = line.split(": ").nth(1) {
-            build.bootstrap_key_stage0 = key.to_string();
-        }
-    }
-}
diff --git a/src/bootstrap/build/check.rs b/src/bootstrap/build/check.rs
deleted file mode 100644 (file)
index 0a096f8..0000000
+++ /dev/null
@@ -1,414 +0,0 @@
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Implementation of the various `check-*` targets of the build system.
-//!
-//! This file implements the various regression test suites that we execute on
-//! our CI.
-
-use std::env;
-use std::fs::{self, File};
-use std::io::prelude::*;
-use std::path::{PathBuf, Path};
-use std::process::Command;
-
-use build_helper::output;
-use bootstrap::{dylib_path, dylib_path_var};
-
-use build::{Build, Compiler, Mode};
-use build::util;
-
-const ADB_TEST_DIR: &'static str = "/data/tmp";
-
-/// Runs the `linkchecker` tool as compiled in `stage` by the `host` compiler.
-///
-/// This tool in `src/tools` will verify the validity of all our links in the
-/// documentation to ensure we don't have a bunch of dead ones.
-pub fn linkcheck(build: &Build, stage: u32, host: &str) {
-    println!("Linkcheck stage{} ({})", stage, host);
-    let compiler = Compiler::new(stage, host);
-    build.run(build.tool_cmd(&compiler, "linkchecker")
-                   .arg(build.out.join(host).join("doc")));
-}
-
-/// Runs the `cargotest` tool as compiled in `stage` by the `host` compiler.
-///
-/// This tool in `src/tools` will check out a few Rust projects and run `cargo
-/// test` to ensure that we don't regress the test suites there.
-pub fn cargotest(build: &Build, stage: u32, host: &str) {
-    let ref compiler = Compiler::new(stage, host);
-
-    // Configure PATH to find the right rustc. NB. we have to use PATH
-    // and not RUSTC because the Cargo test suite has tests that will
-    // fail if rustc is not spelled `rustc`.
-    let path = build.sysroot(compiler).join("bin");
-    let old_path = ::std::env::var("PATH").expect("");
-    let sep = if cfg!(windows) { ";" } else {":" };
-    let ref newpath = format!("{}{}{}", path.display(), sep, old_path);
-
-    // Note that this is a short, cryptic, and not scoped directory name. This
-    // is currently to minimize the length of path on Windows where we otherwise
-    // quickly run into path name limit constraints.
-    let out_dir = build.out.join("ct");
-    t!(fs::create_dir_all(&out_dir));
-
-    build.run(build.tool_cmd(compiler, "cargotest")
-                   .env("PATH", newpath)
-                   .arg(&build.cargo)
-                   .arg(&out_dir));
-}
-
-/// Runs the `tidy` tool as compiled in `stage` by the `host` compiler.
-///
-/// This tool in `src/tools` checks up on various bits and pieces of style and
-/// otherwise just implements a few lint-like checks that are specific to the
-/// compiler itself.
-pub fn tidy(build: &Build, stage: u32, host: &str) {
-    println!("tidy check stage{} ({})", stage, host);
-    let compiler = Compiler::new(stage, host);
-    build.run(build.tool_cmd(&compiler, "tidy")
-                   .arg(build.src.join("src")));
-}
-
-fn testdir(build: &Build, host: &str) -> PathBuf {
-    build.out.join(host).join("test")
-}
-
-/// Executes the `compiletest` tool to run a suite of tests.
-///
-/// Compiles all tests with `compiler` for `target` with the specified
-/// compiletest `mode` and `suite` arguments. For example `mode` can be
-/// "run-pass" or `suite` can be something like `debuginfo`.
-pub fn compiletest(build: &Build,
-                   compiler: &Compiler,
-                   target: &str,
-                   mode: &str,
-                   suite: &str) {
-    println!("Check compiletest {} ({} -> {})", suite, compiler.host, target);
-    let mut cmd = build.tool_cmd(compiler, "compiletest");
-
-    // compiletest currently has... a lot of arguments, so let's just pass all
-    // of them!
-
-    cmd.arg("--compile-lib-path").arg(build.rustc_libdir(compiler));
-    cmd.arg("--run-lib-path").arg(build.sysroot_libdir(compiler, target));
-    cmd.arg("--rustc-path").arg(build.compiler_path(compiler));
-    cmd.arg("--rustdoc-path").arg(build.rustdoc(compiler));
-    cmd.arg("--src-base").arg(build.src.join("src/test").join(suite));
-    cmd.arg("--build-base").arg(testdir(build, compiler.host).join(suite));
-    cmd.arg("--stage-id").arg(format!("stage{}-{}", compiler.stage, target));
-    cmd.arg("--mode").arg(mode);
-    cmd.arg("--target").arg(target);
-    cmd.arg("--host").arg(compiler.host);
-    cmd.arg("--llvm-filecheck").arg(build.llvm_filecheck(&build.config.build));
-
-    let mut flags = vec!["-Crpath".to_string()];
-    if build.config.rust_optimize_tests {
-        flags.push("-O".to_string());
-    }
-    if build.config.rust_debuginfo_tests {
-        flags.push("-g".to_string());
-    }
-
-    let mut hostflags = build.rustc_flags(&compiler.host);
-    hostflags.extend(flags.clone());
-    cmd.arg("--host-rustcflags").arg(hostflags.join(" "));
-
-    let mut targetflags = build.rustc_flags(&target);
-    targetflags.extend(flags);
-    targetflags.push(format!("-Lnative={}",
-                             build.test_helpers_out(target).display()));
-    cmd.arg("--target-rustcflags").arg(targetflags.join(" "));
-
-    // FIXME: CFG_PYTHON should probably be detected more robustly elsewhere
-    let python_default = "python";
-    cmd.arg("--docck-python").arg(python_default);
-
-    if build.config.build.ends_with("apple-darwin") {
-        // Force /usr/bin/python on OSX for LLDB tests because we're loading the
-        // LLDB plugin's compiled module which only works with the system python
-        // (namely not Homebrew-installed python)
-        cmd.arg("--lldb-python").arg("/usr/bin/python");
-    } else {
-        cmd.arg("--lldb-python").arg(python_default);
-    }
-
-    if let Some(ref vers) = build.gdb_version {
-        cmd.arg("--gdb-version").arg(vers);
-    }
-    if let Some(ref vers) = build.lldb_version {
-        cmd.arg("--lldb-version").arg(vers);
-    }
-    if let Some(ref dir) = build.lldb_python_dir {
-        cmd.arg("--lldb-python-dir").arg(dir);
-    }
-
-    cmd.args(&build.flags.args);
-
-    if build.config.verbose || build.flags.verbose {
-        cmd.arg("--verbose");
-    }
-
-    // Only pass correct values for these flags for the `run-make` suite as it
-    // requires that a C++ compiler was configured which isn't always the case.
-    if suite == "run-make" {
-        let llvm_config = build.llvm_config(target);
-        let llvm_components = output(Command::new(&llvm_config).arg("--components"));
-        let llvm_cxxflags = output(Command::new(&llvm_config).arg("--cxxflags"));
-        cmd.arg("--cc").arg(build.cc(target))
-           .arg("--cxx").arg(build.cxx(target))
-           .arg("--cflags").arg(build.cflags(target).join(" "))
-           .arg("--llvm-components").arg(llvm_components.trim())
-           .arg("--llvm-cxxflags").arg(llvm_cxxflags.trim());
-    } else {
-        cmd.arg("--cc").arg("")
-           .arg("--cxx").arg("")
-           .arg("--cflags").arg("")
-           .arg("--llvm-components").arg("")
-           .arg("--llvm-cxxflags").arg("");
-    }
-
-    // Running a C compiler on MSVC requires a few env vars to be set, to be
-    // sure to set them here.
-    if target.contains("msvc") {
-        for &(ref k, ref v) in build.cc[target].0.env() {
-            if k != "PATH" {
-                cmd.env(k, v);
-            }
-        }
-    }
-    build.add_bootstrap_key(compiler, &mut cmd);
-
-    cmd.arg("--adb-path").arg("adb");
-    cmd.arg("--adb-test-dir").arg(ADB_TEST_DIR);
-    if target.contains("android") {
-        // Assume that cc for this target comes from the android sysroot
-        cmd.arg("--android-cross-path")
-           .arg(build.cc(target).parent().unwrap().parent().unwrap());
-    } else {
-        cmd.arg("--android-cross-path").arg("");
-    }
-
-    build.run(&mut cmd);
-}
-
-/// Run `rustdoc --test` for all documentation in `src/doc`.
-///
-/// This will run all tests in our markdown documentation (e.g. the book)
-/// located in `src/doc`. The `rustdoc` that's run is the one that sits next to
-/// `compiler`.
-pub fn docs(build: &Build, compiler: &Compiler) {
-    // Do a breadth-first traversal of the `src/doc` directory and just run
-    // tests for all files that end in `*.md`
-    let mut stack = vec![build.src.join("src/doc")];
-
-    while let Some(p) = stack.pop() {
-        if p.is_dir() {
-            stack.extend(t!(p.read_dir()).map(|p| t!(p).path()));
-            continue
-        }
-
-        if p.extension().and_then(|s| s.to_str()) != Some("md") {
-            continue
-        }
-
-        println!("doc tests for: {}", p.display());
-        markdown_test(build, compiler, &p);
-    }
-}
-
-/// Run the error index generator tool to execute the tests located in the error
-/// index.
-///
-/// The `error_index_generator` tool lives in `src/tools` and is used to
-/// generate a markdown file from the error indexes of the code base which is
-/// then passed to `rustdoc --test`.
-pub fn error_index(build: &Build, compiler: &Compiler) {
-    println!("Testing error-index stage{}", compiler.stage);
-
-    let output = testdir(build, compiler.host).join("error-index.md");
-    build.run(build.tool_cmd(compiler, "error_index_generator")
-                   .arg("markdown")
-                   .arg(&output)
-                   .env("CFG_BUILD", &build.config.build));
-
-    markdown_test(build, compiler, &output);
-}
-
-fn markdown_test(build: &Build, compiler: &Compiler, markdown: &Path) {
-    let mut cmd = Command::new(build.rustdoc(compiler));
-    build.add_rustc_lib_path(compiler, &mut cmd);
-    cmd.arg("--test");
-    cmd.arg(markdown);
-    cmd.arg("--test-args").arg(build.flags.args.join(" "));
-    build.run(&mut cmd);
-}
-
-/// Run all unit tests plus documentation tests for an entire crate DAG defined
-/// by a `Cargo.toml`
-///
-/// This is what runs tests for crates like the standard library, compiler, etc.
-/// It essentially is the driver for running `cargo test`.
-///
-/// Currently this runs all tests for a DAG by passing a bunch of `-p foo`
-/// arguments, and those arguments are discovered from `Cargo.lock`.
-pub fn krate(build: &Build,
-             compiler: &Compiler,
-             target: &str,
-             mode: Mode) {
-    let (name, path, features) = match mode {
-        Mode::Libstd => ("libstd", "src/rustc/std_shim", build.std_features()),
-        Mode::Libtest => ("libtest", "src/rustc/test_shim", String::new()),
-        Mode::Librustc => ("librustc", "src/rustc", build.rustc_features()),
-        _ => panic!("can only test libraries"),
-    };
-    println!("Testing {} stage{} ({} -> {})", name, compiler.stage,
-             compiler.host, target);
-
-    // Build up the base `cargo test` command.
-    let mut cargo = build.cargo(compiler, mode, target, "test");
-    cargo.arg("--manifest-path")
-         .arg(build.src.join(path).join("Cargo.toml"))
-         .arg("--features").arg(features);
-
-    // Generate a list of `-p` arguments to pass to the `cargo test` invocation
-    // by crawling the corresponding Cargo.lock file.
-    let lockfile = build.src.join(path).join("Cargo.lock");
-    let mut contents = String::new();
-    t!(t!(File::open(&lockfile)).read_to_string(&mut contents));
-    let mut lines = contents.lines();
-    while let Some(line) = lines.next() {
-        let prefix = "name = \"";
-        if !line.starts_with(prefix) {
-            continue
-        }
-        lines.next(); // skip `version = ...`
-
-        // skip crates.io or otherwise non-path crates
-        if let Some(line) = lines.next() {
-            if line.starts_with("source") {
-                continue
-            }
-        }
-
-        let crate_name = &line[prefix.len()..line.len() - 1];
-
-        // Right now jemalloc is our only target-specific crate in the sense
-        // that it's not present on all platforms. Custom skip it here for now,
-        // but if we add more this probably wants to get more generalized.
-        if crate_name.contains("jemalloc") {
-            continue
-        }
-
-        cargo.arg("-p").arg(crate_name);
-    }
-
-    // The tests are going to run with the *target* libraries, so we need to
-    // ensure that those libraries show up in the LD_LIBRARY_PATH equivalent.
-    //
-    // Note that to run the compiler we need to run with the *host* libraries,
-    // but our wrapper scripts arrange for that to be the case anyway.
-    let mut dylib_path = dylib_path();
-    dylib_path.insert(0, build.sysroot_libdir(compiler, target));
-    cargo.env(dylib_path_var(), env::join_paths(&dylib_path).unwrap());
-
-    if target.contains("android") {
-        build.run(cargo.arg("--no-run"));
-        krate_android(build, compiler, target, mode);
-    } else {
-        cargo.args(&build.flags.args);
-        build.run(&mut cargo);
-    }
-}
-
-fn krate_android(build: &Build,
-                 compiler: &Compiler,
-                 target: &str,
-                 mode: Mode) {
-    let mut tests = Vec::new();
-    let out_dir = build.cargo_out(compiler, mode, target);
-    find_tests(&out_dir, target, &mut tests);
-    find_tests(&out_dir.join("deps"), target, &mut tests);
-
-    for test in tests {
-        build.run(Command::new("adb").arg("push").arg(&test).arg(ADB_TEST_DIR));
-
-        let test_file_name = test.file_name().unwrap().to_string_lossy();
-        let log = format!("{}/check-stage{}-T-{}-H-{}-{}.log",
-                          ADB_TEST_DIR,
-                          compiler.stage,
-                          target,
-                          compiler.host,
-                          test_file_name);
-        let program = format!("(cd {dir}; \
-                                LD_LIBRARY_PATH=./{target} ./{test} \
-                                    --logfile {log} \
-                                    {args})",
-                              dir = ADB_TEST_DIR,
-                              target = target,
-                              test = test_file_name,
-                              log = log,
-                              args = build.flags.args.join(" "));
-
-        let output = output(Command::new("adb").arg("shell").arg(&program));
-        println!("{}", output);
-        build.run(Command::new("adb")
-                          .arg("pull")
-                          .arg(&log)
-                          .arg(build.out.join("tmp")));
-        build.run(Command::new("adb").arg("shell").arg("rm").arg(&log));
-        if !output.contains("result: ok") {
-            panic!("some tests failed");
-        }
-    }
-}
-
-fn find_tests(dir: &Path,
-              target: &str,
-              dst: &mut Vec<PathBuf>) {
-    for e in t!(dir.read_dir()).map(|e| t!(e)) {
-        let file_type = t!(e.file_type());
-        if !file_type.is_file() {
-            continue
-        }
-        let filename = e.file_name().into_string().unwrap();
-        if (target.contains("windows") && filename.ends_with(".exe")) ||
-           (!target.contains("windows") && !filename.contains(".")) {
-            dst.push(e.path());
-        }
-    }
-}
-
-pub fn android_copy_libs(build: &Build,
-                         compiler: &Compiler,
-                         target: &str) {
-    println!("Android copy libs to emulator ({})", target);
-    build.run(Command::new("adb").arg("remount"));
-    build.run(Command::new("adb").args(&["shell", "rm", "-r", ADB_TEST_DIR]));
-    build.run(Command::new("adb").args(&["shell", "mkdir", ADB_TEST_DIR]));
-    build.run(Command::new("adb")
-                      .arg("push")
-                      .arg(build.src.join("src/etc/adb_run_wrapper.sh"))
-                      .arg(ADB_TEST_DIR));
-
-    let target_dir = format!("{}/{}", ADB_TEST_DIR, target);
-    build.run(Command::new("adb").args(&["shell", "mkdir", &target_dir[..]]));
-
-    for f in t!(build.sysroot_libdir(compiler, target).read_dir()) {
-        let f = t!(f);
-        let name = f.file_name().into_string().unwrap();
-        if util::is_dylib(&name) {
-            build.run(Command::new("adb")
-                              .arg("push")
-                              .arg(f.path())
-                              .arg(&target_dir));
-        }
-    }
-}
diff --git a/src/bootstrap/build/clean.rs b/src/bootstrap/build/clean.rs
deleted file mode 100644 (file)
index 91334bd..0000000
+++ /dev/null
@@ -1,49 +0,0 @@
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Implementation of `make clean` in rustbuild.
-//!
-//! Responsible for cleaning out a build directory of all old and stale
-//! artifacts to prepare for a fresh build. Currently doesn't remove the
-//! `build/cache` directory (download cache) or the `build/$target/llvm`
-//! directory as we want that cached between builds.
-
-use std::fs;
-use std::path::Path;
-
-use build::Build;
-
-pub fn clean(build: &Build) {
-    rm_rf(build, "tmp".as_ref());
-    rm_rf(build, &build.out.join("tmp"));
-
-    for host in build.config.host.iter() {
-
-        let out = build.out.join(host);
-
-        rm_rf(build, &out.join("compiler-rt"));
-        rm_rf(build, &out.join("doc"));
-
-        for stage in 0..4 {
-            rm_rf(build, &out.join(format!("stage{}", stage)));
-            rm_rf(build, &out.join(format!("stage{}-std", stage)));
-            rm_rf(build, &out.join(format!("stage{}-rustc", stage)));
-            rm_rf(build, &out.join(format!("stage{}-tools", stage)));
-            rm_rf(build, &out.join(format!("stage{}-test", stage)));
-        }
-    }
-}
-
-fn rm_rf(build: &Build, path: &Path) {
-    if path.exists() {
-        build.verbose(&format!("removing `{}`", path.display()));
-        t!(fs::remove_dir_all(path));
-    }
-}
diff --git a/src/bootstrap/build/compile.rs b/src/bootstrap/build/compile.rs
deleted file mode 100644 (file)
index 5ed9c1c..0000000
+++ /dev/null
@@ -1,360 +0,0 @@
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Implementation of compiling various phases of the compiler and standard
-//! library.
-//!
-//! This module contains some of the real meat in the rustbuild build system
-//! which is where Cargo is used to compiler the standard library, libtest, and
-//! compiler. This module is also responsible for assembling the sysroot as it
-//! goes along from the output of the previous stage.
-
-use std::collections::HashMap;
-use std::fs;
-use std::path::{Path, PathBuf};
-use std::process::Command;
-
-use build_helper::output;
-
-use build::util::{exe, staticlib, libdir, mtime, is_dylib, copy};
-use build::{Build, Compiler, Mode};
-
-/// Build the standard library.
-///
-/// This will build the standard library for a particular stage of the build
-/// using the `compiler` targeting the `target` architecture. The artifacts
-/// created will also be linked into the sysroot directory.
-pub fn std<'a>(build: &'a Build, target: &str, compiler: &Compiler<'a>) {
-    println!("Building stage{} std artifacts ({} -> {})", compiler.stage,
-             compiler.host, target);
-
-    // Move compiler-rt into place as it'll be required by the compiler when
-    // building the standard library to link the dylib of libstd
-    let libdir = build.sysroot_libdir(compiler, target);
-    let _ = fs::remove_dir_all(&libdir);
-    t!(fs::create_dir_all(&libdir));
-    copy(&build.compiler_rt_built.borrow()[target],
-         &libdir.join(staticlib("compiler-rt", target)));
-
-    // Some platforms have startup objects that may be required to produce the
-    // libstd dynamic library, for example.
-    build_startup_objects(build, target, &libdir);
-
-    let out_dir = build.cargo_out(compiler, Mode::Libstd, target);
-    build.clear_if_dirty(&out_dir, &build.compiler_path(compiler));
-    let mut cargo = build.cargo(compiler, Mode::Libstd, target, "build");
-    cargo.arg("--features").arg(build.std_features())
-         .arg("--manifest-path")
-         .arg(build.src.join("src/rustc/std_shim/Cargo.toml"));
-
-    if let Some(target) = build.config.target_config.get(target) {
-        if let Some(ref jemalloc) = target.jemalloc {
-            cargo.env("JEMALLOC_OVERRIDE", jemalloc);
-        }
-    }
-    if let Some(ref p) = build.config.musl_root {
-        if target.contains("musl") {
-            cargo.env("MUSL_ROOT", p);
-        }
-    }
-
-    build.run(&mut cargo);
-    std_link(build, target, compiler, compiler.host);
-}
-
-/// Link all libstd rlibs/dylibs into the sysroot location.
-///
-/// Links those artifacts generated in the given `stage` for `target` produced
-/// by `compiler` into `host`'s sysroot.
-pub fn std_link(build: &Build,
-                target: &str,
-                compiler: &Compiler,
-                host: &str) {
-    let target_compiler = Compiler::new(compiler.stage, host);
-    let libdir = build.sysroot_libdir(&target_compiler, target);
-    let out_dir = build.cargo_out(compiler, Mode::Libstd, target);
-
-    // If we're linking one compiler host's output into another, then we weren't
-    // called from the `std` method above. In that case we clean out what's
-    // already there and then also link compiler-rt into place.
-    if host != compiler.host {
-        let _ = fs::remove_dir_all(&libdir);
-        t!(fs::create_dir_all(&libdir));
-        copy(&build.compiler_rt_built.borrow()[target],
-             &libdir.join(staticlib("compiler-rt", target)));
-    }
-    add_to_sysroot(&out_dir, &libdir);
-
-    if target.contains("musl") &&
-       (target.contains("x86_64") || target.contains("i686")) {
-        copy_third_party_objects(build, target, &libdir);
-    }
-}
-
-/// Copies the crt(1,i,n).o startup objects
-///
-/// Only required for musl targets that statically link to libc
-fn copy_third_party_objects(build: &Build, target: &str, into: &Path) {
-    for &obj in &["crt1.o", "crti.o", "crtn.o"] {
-        copy(&compiler_file(build.cc(target), obj), &into.join(obj));
-    }
-}
-
-/// Build and prepare startup objects like rsbegin.o and rsend.o
-///
-/// These are primarily used on Windows right now for linking executables/dlls.
-/// They don't require any library support as they're just plain old object
-/// files, so we just use the nightly snapshot compiler to always build them (as
-/// no other compilers are guaranteed to be available).
-fn build_startup_objects(build: &Build, target: &str, into: &Path) {
-    if !target.contains("pc-windows-gnu") {
-        return
-    }
-    let compiler = Compiler::new(0, &build.config.build);
-    let compiler = build.compiler_path(&compiler);
-
-    for file in t!(fs::read_dir(build.src.join("src/rtstartup"))) {
-        let file = t!(file);
-        build.run(Command::new(&compiler)
-                          .arg("--emit=obj")
-                          .arg("--out-dir").arg(into)
-                          .arg(file.path()));
-    }
-
-    for obj in ["crt2.o", "dllcrt2.o"].iter() {
-        copy(&compiler_file(build.cc(target), obj), &into.join(obj));
-    }
-}
-
-/// Build libtest.
-///
-/// This will build libtest and supporting libraries for a particular stage of
-/// the build using the `compiler` targeting the `target` architecture. The
-/// artifacts created will also be linked into the sysroot directory.
-pub fn test<'a>(build: &'a Build, target: &str, compiler: &Compiler<'a>) {
-    println!("Building stage{} test artifacts ({} -> {})", compiler.stage,
-             compiler.host, target);
-    let out_dir = build.cargo_out(compiler, Mode::Libtest, target);
-    build.clear_if_dirty(&out_dir, &libstd_shim(build, compiler, target));
-    let mut cargo = build.cargo(compiler, Mode::Libtest, target, "build");
-    cargo.arg("--manifest-path")
-         .arg(build.src.join("src/rustc/test_shim/Cargo.toml"));
-    build.run(&mut cargo);
-    test_link(build, target, compiler, compiler.host);
-}
-
-/// Link all libtest rlibs/dylibs into the sysroot location.
-///
-/// Links those artifacts generated in the given `stage` for `target` produced
-/// by `compiler` into `host`'s sysroot.
-pub fn test_link(build: &Build,
-                 target: &str,
-                 compiler: &Compiler,
-                 host: &str) {
-    let target_compiler = Compiler::new(compiler.stage, host);
-    let libdir = build.sysroot_libdir(&target_compiler, target);
-    let out_dir = build.cargo_out(compiler, Mode::Libtest, target);
-    add_to_sysroot(&out_dir, &libdir);
-}
-
-/// Build the compiler.
-///
-/// This will build the compiler for a particular stage of the build using
-/// the `compiler` targeting the `target` architecture. The artifacts
-/// created will also be linked into the sysroot directory.
-pub fn rustc<'a>(build: &'a Build, target: &str, compiler: &Compiler<'a>) {
-    println!("Building stage{} compiler artifacts ({} -> {})",
-             compiler.stage, compiler.host, target);
-
-    let out_dir = build.cargo_out(compiler, Mode::Librustc, target);
-    build.clear_if_dirty(&out_dir, &libtest_shim(build, compiler, target));
-
-    let mut cargo = build.cargo(compiler, Mode::Librustc, target, "build");
-    cargo.arg("--features").arg(build.rustc_features())
-         .arg("--manifest-path")
-         .arg(build.src.join("src/rustc/Cargo.toml"));
-
-    // Set some configuration variables picked up by build scripts and
-    // the compiler alike
-    cargo.env("CFG_RELEASE", &build.release)
-         .env("CFG_RELEASE_CHANNEL", &build.config.channel)
-         .env("CFG_VERSION", &build.version)
-         .env("CFG_BOOTSTRAP_KEY", &build.bootstrap_key)
-         .env("CFG_PREFIX", build.config.prefix.clone().unwrap_or(String::new()))
-         .env("CFG_LIBDIR_RELATIVE", "lib");
-
-    if let Some(ref ver_date) = build.ver_date {
-        cargo.env("CFG_VER_DATE", ver_date);
-    }
-    if let Some(ref ver_hash) = build.ver_hash {
-        cargo.env("CFG_VER_HASH", ver_hash);
-    }
-    if !build.unstable_features {
-        cargo.env("CFG_DISABLE_UNSTABLE_FEATURES", "1");
-    }
-    cargo.env("LLVM_CONFIG", build.llvm_config(target));
-    if build.config.llvm_static_stdcpp {
-        cargo.env("LLVM_STATIC_STDCPP",
-                  compiler_file(build.cxx(target), "libstdc++.a"));
-    }
-    if let Some(ref s) = build.config.rustc_default_linker {
-        cargo.env("CFG_DEFAULT_LINKER", s);
-    }
-    if let Some(ref s) = build.config.rustc_default_ar {
-        cargo.env("CFG_DEFAULT_AR", s);
-    }
-    build.run(&mut cargo);
-
-    rustc_link(build, target, compiler, compiler.host);
-}
-
-/// Link all librustc rlibs/dylibs into the sysroot location.
-///
-/// Links those artifacts generated in the given `stage` for `target` produced
-/// by `compiler` into `host`'s sysroot.
-pub fn rustc_link(build: &Build,
-                  target: &str,
-                  compiler: &Compiler,
-                  host: &str) {
-    let target_compiler = Compiler::new(compiler.stage, host);
-    let libdir = build.sysroot_libdir(&target_compiler, target);
-    let out_dir = build.cargo_out(compiler, Mode::Librustc, target);
-    add_to_sysroot(&out_dir, &libdir);
-}
-
-/// Cargo's output path for the standard library in a given stage, compiled
-/// by a particular compiler for the specified target.
-fn libstd_shim(build: &Build, compiler: &Compiler, target: &str) -> PathBuf {
-    build.cargo_out(compiler, Mode::Libstd, target).join("libstd_shim.rlib")
-}
-
-/// Cargo's output path for libtest in a given stage, compiled by a particular
-/// compiler for the specified target.
-fn libtest_shim(build: &Build, compiler: &Compiler, target: &str) -> PathBuf {
-    build.cargo_out(compiler, Mode::Libtest, target).join("libtest_shim.rlib")
-}
-
-fn compiler_file(compiler: &Path, file: &str) -> PathBuf {
-    let out = output(Command::new(compiler)
-                            .arg(format!("-print-file-name={}", file)));
-    PathBuf::from(out.trim())
-}
-
-/// Prepare a new compiler from the artifacts in `stage`
-///
-/// This will assemble a compiler in `build/$host/stage$stage`. The compiler
-/// must have been previously produced by the `stage - 1` build.config.build
-/// compiler.
-pub fn assemble_rustc(build: &Build, stage: u32, host: &str) {
-    assert!(stage > 0, "the stage0 compiler isn't assembled, it's downloaded");
-    // The compiler that we're assembling
-    let target_compiler = Compiler::new(stage, host);
-
-    // The compiler that compiled the compiler we're assembling
-    let build_compiler = Compiler::new(stage - 1, &build.config.build);
-
-    // Clear out old files
-    let sysroot = build.sysroot(&target_compiler);
-    let _ = fs::remove_dir_all(&sysroot);
-    t!(fs::create_dir_all(&sysroot));
-
-    // Link in all dylibs to the libdir
-    let sysroot_libdir = sysroot.join(libdir(host));
-    t!(fs::create_dir_all(&sysroot_libdir));
-    let src_libdir = build.sysroot_libdir(&build_compiler, host);
-    for f in t!(fs::read_dir(&src_libdir)).map(|f| t!(f)) {
-        let filename = f.file_name().into_string().unwrap();
-        if is_dylib(&filename) {
-            copy(&f.path(), &sysroot_libdir.join(&filename));
-        }
-    }
-
-    let out_dir = build.cargo_out(&build_compiler, Mode::Librustc, host);
-
-    // Link the compiler binary itself into place
-    let rustc = out_dir.join(exe("rustc", host));
-    let bindir = sysroot.join("bin");
-    t!(fs::create_dir_all(&bindir));
-    let compiler = build.compiler_path(&Compiler::new(stage, host));
-    let _ = fs::remove_file(&compiler);
-    copy(&rustc, &compiler);
-
-    // See if rustdoc exists to link it into place
-    let rustdoc = exe("rustdoc", host);
-    let rustdoc_src = out_dir.join(&rustdoc);
-    let rustdoc_dst = bindir.join(&rustdoc);
-    if fs::metadata(&rustdoc_src).is_ok() {
-        let _ = fs::remove_file(&rustdoc_dst);
-        copy(&rustdoc_src, &rustdoc_dst);
-    }
-}
-
-/// Link some files into a rustc sysroot.
-///
-/// For a particular stage this will link all of the contents of `out_dir`
-/// into the sysroot of the `host` compiler, assuming the artifacts are
-/// compiled for the specified `target`.
-fn add_to_sysroot(out_dir: &Path, sysroot_dst: &Path) {
-    // Collect the set of all files in the dependencies directory, keyed
-    // off the name of the library. We assume everything is of the form
-    // `foo-<hash>.{rlib,so,...}`, and there could be multiple different
-    // `<hash>` values for the same name (of old builds).
-    let mut map = HashMap::new();
-    for file in t!(fs::read_dir(out_dir.join("deps"))).map(|f| t!(f)) {
-        let filename = file.file_name().into_string().unwrap();
-
-        // We're only interested in linking rlibs + dylibs, other things like
-        // unit tests don't get linked in
-        if !filename.ends_with(".rlib") &&
-           !filename.ends_with(".lib") &&
-           !is_dylib(&filename) {
-            continue
-        }
-        let file = file.path();
-        let dash = filename.find("-").unwrap();
-        let key = (filename[..dash].to_string(),
-                   file.extension().unwrap().to_owned());
-        map.entry(key).or_insert(Vec::new())
-           .push(file.clone());
-    }
-
-    // For all hash values found, pick the most recent one to move into the
-    // sysroot, that should be the one we just built.
-    for (_, paths) in map {
-        let (_, path) = paths.iter().map(|path| {
-            (mtime(&path).seconds(), path)
-        }).max().unwrap();
-        copy(&path, &sysroot_dst.join(path.file_name().unwrap()));
-    }
-}
-
-/// Build a tool in `src/tools`
-///
-/// This will build the specified tool with the specified `host` compiler in
-/// `stage` into the normal cargo output directory.
-pub fn tool(build: &Build, stage: u32, host: &str, tool: &str) {
-    println!("Building stage{} tool {} ({})", stage, tool, host);
-
-    let compiler = Compiler::new(stage, host);
-
-    // FIXME: need to clear out previous tool and ideally deps, may require
-    //        isolating output directories or require a pseudo shim step to
-    //        clear out all the info.
-    //
-    //        Maybe when libstd is compiled it should clear out the rustc of the
-    //        corresponding stage?
-    // let out_dir = build.cargo_out(stage, &host, Mode::Librustc, target);
-    // build.clear_if_dirty(&out_dir, &libstd_shim(build, stage, &host, target));
-
-    let mut cargo = build.cargo(&compiler, Mode::Tool, host, "build");
-    cargo.arg("--manifest-path")
-         .arg(build.src.join(format!("src/tools/{}/Cargo.toml", tool)));
-    build.run(&mut cargo);
-}
diff --git a/src/bootstrap/build/config.rs b/src/bootstrap/build/config.rs
deleted file mode 100644 (file)
index 498196e..0000000
+++ /dev/null
@@ -1,396 +0,0 @@
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Serialized configuration of a build.
-//!
-//! This module implements parsing `config.mk` and `config.toml` configuration
-//! files to tweak how the build runs.
-
-use std::collections::HashMap;
-use std::env;
-use std::fs::File;
-use std::io::prelude::*;
-use std::path::PathBuf;
-use std::process;
-
-use num_cpus;
-use rustc_serialize::Decodable;
-use toml::{Parser, Decoder, Value};
-
-/// Global configuration for the entire build and/or bootstrap.
-///
-/// This structure is derived from a combination of both `config.toml` and
-/// `config.mk`. As of the time of this writing it's unlikely that `config.toml`
-/// is used all that much, so this is primarily filled out by `config.mk` which
-/// is generated from `./configure`.
-///
-/// Note that this structure is not decoded directly into, but rather it is
-/// filled out from the decoded forms of the structs below. For documentation
-/// each field, see the corresponding fields in
-/// `src/bootstrap/config.toml.example`.
-#[derive(Default)]
-pub struct Config {
-    pub ccache: bool,
-    pub ninja: bool,
-    pub verbose: bool,
-    pub submodules: bool,
-    pub compiler_docs: bool,
-    pub docs: bool,
-    pub target_config: HashMap<String, Target>,
-
-    // llvm codegen options
-    pub llvm_assertions: bool,
-    pub llvm_optimize: bool,
-    pub llvm_version_check: bool,
-    pub llvm_static_stdcpp: bool,
-
-    // rust codegen options
-    pub rust_optimize: bool,
-    pub rust_codegen_units: u32,
-    pub rust_debug_assertions: bool,
-    pub rust_debuginfo: bool,
-    pub rust_rpath: bool,
-    pub rustc_default_linker: Option<String>,
-    pub rustc_default_ar: Option<String>,
-    pub rust_optimize_tests: bool,
-    pub rust_debuginfo_tests: bool,
-
-    pub build: String,
-    pub host: Vec<String>,
-    pub target: Vec<String>,
-    pub rustc: Option<PathBuf>,
-    pub cargo: Option<PathBuf>,
-    pub local_rebuild: bool,
-
-    // libstd features
-    pub debug_jemalloc: bool,
-    pub use_jemalloc: bool,
-
-    // misc
-    pub channel: String,
-    pub musl_root: Option<PathBuf>,
-    pub prefix: Option<String>,
-}
-
-/// Per-target configuration stored in the global configuration structure.
-#[derive(Default)]
-pub struct Target {
-    pub llvm_config: Option<PathBuf>,
-    pub jemalloc: Option<PathBuf>,
-    pub cc: Option<PathBuf>,
-    pub cxx: Option<PathBuf>,
-    pub ndk: Option<PathBuf>,
-}
-
-/// Structure of the `config.toml` file that configuration is read from.
-///
-/// This structure uses `Decodable` to automatically decode a TOML configuration
-/// file into this format, and then this is traversed and written into the above
-/// `Config` structure.
-#[derive(RustcDecodable, Default)]
-struct TomlConfig {
-    build: Option<Build>,
-    llvm: Option<Llvm>,
-    rust: Option<Rust>,
-    target: Option<HashMap<String, TomlTarget>>,
-}
-
-/// TOML representation of various global build decisions.
-#[derive(RustcDecodable, Default, Clone)]
-struct Build {
-    build: Option<String>,
-    host: Vec<String>,
-    target: Vec<String>,
-    cargo: Option<String>,
-    rustc: Option<String>,
-    compiler_docs: Option<bool>,
-    docs: Option<bool>,
-}
-
-/// TOML representation of how the LLVM build is configured.
-#[derive(RustcDecodable, Default)]
-struct Llvm {
-    ccache: Option<bool>,
-    ninja: Option<bool>,
-    assertions: Option<bool>,
-    optimize: Option<bool>,
-    version_check: Option<bool>,
-    static_libstdcpp: Option<bool>,
-}
-
-/// TOML representation of how the Rust build is configured.
-#[derive(RustcDecodable, Default)]
-struct Rust {
-    optimize: Option<bool>,
-    codegen_units: Option<u32>,
-    debug_assertions: Option<bool>,
-    debuginfo: Option<bool>,
-    debug_jemalloc: Option<bool>,
-    use_jemalloc: Option<bool>,
-    default_linker: Option<String>,
-    default_ar: Option<String>,
-    channel: Option<String>,
-    musl_root: Option<String>,
-    rpath: Option<bool>,
-    optimize_tests: Option<bool>,
-    debuginfo_tests: Option<bool>,
-}
-
-/// TOML representation of how each build target is configured.
-#[derive(RustcDecodable, Default)]
-struct TomlTarget {
-    llvm_config: Option<String>,
-    jemalloc: Option<String>,
-    cc: Option<String>,
-    cxx: Option<String>,
-    android_ndk: Option<String>,
-}
-
-impl Config {
-    pub fn parse(build: &str, file: Option<PathBuf>) -> Config {
-        let mut config = Config::default();
-        config.llvm_optimize = true;
-        config.use_jemalloc = true;
-        config.rust_optimize = true;
-        config.rust_optimize_tests = true;
-        config.submodules = true;
-        config.docs = true;
-        config.rust_rpath = true;
-        config.rust_codegen_units = 1;
-        config.build = build.to_string();
-        config.channel = "dev".to_string();
-
-        let toml = file.map(|file| {
-            let mut f = t!(File::open(&file));
-            let mut toml = String::new();
-            t!(f.read_to_string(&mut toml));
-            let mut p = Parser::new(&toml);
-            let table = match p.parse() {
-                Some(table) => table,
-                None => {
-                    println!("failed to parse TOML configuration:");
-                    for err in p.errors.iter() {
-                        let (loline, locol) = p.to_linecol(err.lo);
-                        let (hiline, hicol) = p.to_linecol(err.hi);
-                        println!("{}:{}-{}:{}: {}", loline, locol, hiline,
-                                 hicol, err.desc);
-                    }
-                    process::exit(2);
-                }
-            };
-            let mut d = Decoder::new(Value::Table(table));
-            match Decodable::decode(&mut d) {
-                Ok(cfg) => cfg,
-                Err(e) => {
-                    println!("failed to decode TOML: {}", e);
-                    process::exit(2);
-                }
-            }
-        }).unwrap_or_else(|| TomlConfig::default());
-
-        let build = toml.build.clone().unwrap_or(Build::default());
-        set(&mut config.build, build.build.clone());
-        config.host.push(config.build.clone());
-        for host in build.host.iter() {
-            if !config.host.contains(host) {
-                config.host.push(host.clone());
-            }
-        }
-        for target in config.host.iter().chain(&build.target) {
-            if !config.target.contains(target) {
-                config.target.push(target.clone());
-            }
-        }
-        config.rustc = build.rustc.map(PathBuf::from);
-        config.cargo = build.cargo.map(PathBuf::from);
-        set(&mut config.compiler_docs, build.compiler_docs);
-        set(&mut config.docs, build.docs);
-
-        if let Some(ref llvm) = toml.llvm {
-            set(&mut config.ccache, llvm.ccache);
-            set(&mut config.ninja, llvm.ninja);
-            set(&mut config.llvm_assertions, llvm.assertions);
-            set(&mut config.llvm_optimize, llvm.optimize);
-            set(&mut config.llvm_version_check, llvm.version_check);
-            set(&mut config.llvm_static_stdcpp, llvm.static_libstdcpp);
-        }
-        if let Some(ref rust) = toml.rust {
-            set(&mut config.rust_debug_assertions, rust.debug_assertions);
-            set(&mut config.rust_debuginfo, rust.debuginfo);
-            set(&mut config.rust_optimize, rust.optimize);
-            set(&mut config.rust_optimize_tests, rust.optimize_tests);
-            set(&mut config.rust_debuginfo_tests, rust.debuginfo_tests);
-            set(&mut config.rust_rpath, rust.rpath);
-            set(&mut config.debug_jemalloc, rust.debug_jemalloc);
-            set(&mut config.use_jemalloc, rust.use_jemalloc);
-            set(&mut config.channel, rust.channel.clone());
-            config.rustc_default_linker = rust.default_linker.clone();
-            config.rustc_default_ar = rust.default_ar.clone();
-            config.musl_root = rust.musl_root.clone().map(PathBuf::from);
-
-            match rust.codegen_units {
-                Some(0) => config.rust_codegen_units = num_cpus::get() as u32,
-                Some(n) => config.rust_codegen_units = n,
-                None => {}
-            }
-        }
-
-        if let Some(ref t) = toml.target {
-            for (triple, cfg) in t {
-                let mut target = Target::default();
-
-                if let Some(ref s) = cfg.llvm_config {
-                    target.llvm_config = Some(env::current_dir().unwrap().join(s));
-                }
-                if let Some(ref s) = cfg.jemalloc {
-                    target.jemalloc = Some(env::current_dir().unwrap().join(s));
-                }
-                if let Some(ref s) = cfg.android_ndk {
-                    target.ndk = Some(env::current_dir().unwrap().join(s));
-                }
-                target.cxx = cfg.cxx.clone().map(PathBuf::from);
-                target.cc = cfg.cc.clone().map(PathBuf::from);
-
-                config.target_config.insert(triple.clone(), target);
-            }
-        }
-
-        return config
-    }
-
-    /// "Temporary" routine to parse `config.mk` into this configuration.
-    ///
-    /// While we still have `./configure` this implements the ability to decode
-    /// that configuration into this. This isn't exactly a full-blown makefile
-    /// parser, but hey it gets the job done!
-    pub fn update_with_config_mk(&mut self) {
-        let mut config = String::new();
-        File::open("config.mk").unwrap().read_to_string(&mut config).unwrap();
-        for line in config.lines() {
-            let mut parts = line.splitn(2, ":=").map(|s| s.trim());
-            let key = parts.next().unwrap();
-            let value = match parts.next() {
-                Some(n) if n.starts_with('\"') => &n[1..n.len() - 1],
-                Some(n) => n,
-                None => continue
-            };
-
-            macro_rules! check {
-                ($(($name:expr, $val:expr),)*) => {
-                    if value == "1" {
-                        $(
-                            if key == concat!("CFG_ENABLE_", $name) {
-                                $val = true;
-                                continue
-                            }
-                            if key == concat!("CFG_DISABLE_", $name) {
-                                $val = false;
-                                continue
-                            }
-                        )*
-                    }
-                }
-            }
-
-            check! {
-                ("CCACHE", self.ccache),
-                ("MANAGE_SUBMODULES", self.submodules),
-                ("COMPILER_DOCS", self.compiler_docs),
-                ("DOCS", self.docs),
-                ("LLVM_ASSERTIONS", self.llvm_assertions),
-                ("OPTIMIZE_LLVM", self.llvm_optimize),
-                ("LLVM_VERSION_CHECK", self.llvm_version_check),
-                ("LLVM_STATIC_STDCPP", self.llvm_static_stdcpp),
-                ("OPTIMIZE", self.rust_optimize),
-                ("DEBUG_ASSERTIONS", self.rust_debug_assertions),
-                ("DEBUGINFO", self.rust_debuginfo),
-                ("JEMALLOC", self.use_jemalloc),
-                ("DEBUG_JEMALLOC", self.debug_jemalloc),
-                ("RPATH", self.rust_rpath),
-                ("OPTIMIZE_TESTS", self.rust_optimize_tests),
-                ("DEBUGINFO_TESTS", self.rust_debuginfo_tests),
-                ("LOCAL_REBUILD", self.local_rebuild),
-            }
-
-            match key {
-                "CFG_BUILD" => self.build = value.to_string(),
-                "CFG_HOST" => {
-                    self.host = value.split(" ").map(|s| s.to_string())
-                                     .collect();
-                }
-                "CFG_TARGET" => {
-                    self.target = value.split(" ").map(|s| s.to_string())
-                                       .collect();
-                }
-                "CFG_MUSL_ROOT" if value.len() > 0 => {
-                    self.musl_root = Some(PathBuf::from(value));
-                }
-                "CFG_DEFAULT_AR" if value.len() > 0 => {
-                    self.rustc_default_ar = Some(value.to_string());
-                }
-                "CFG_DEFAULT_LINKER" if value.len() > 0 => {
-                    self.rustc_default_linker = Some(value.to_string());
-                }
-                "CFG_RELEASE_CHANNEL" => {
-                    self.channel = value.to_string();
-                }
-                "CFG_PREFIX" => {
-                    self.prefix = Some(value.to_string());
-                }
-                "CFG_LLVM_ROOT" if value.len() > 0 => {
-                    let target = self.target_config.entry(self.build.clone())
-                                     .or_insert(Target::default());
-                    let root = PathBuf::from(value);
-                    target.llvm_config = Some(root.join("bin/llvm-config"));
-                }
-                "CFG_JEMALLOC_ROOT" if value.len() > 0 => {
-                    let target = self.target_config.entry(self.build.clone())
-                                     .or_insert(Target::default());
-                    target.jemalloc = Some(PathBuf::from(value));
-                }
-                "CFG_ARM_LINUX_ANDROIDEABI_NDK" if value.len() > 0 => {
-                    let target = "arm-linux-androideabi".to_string();
-                    let target = self.target_config.entry(target)
-                                     .or_insert(Target::default());
-                    target.ndk = Some(PathBuf::from(value));
-                }
-                "CFG_ARMV7_LINUX_ANDROIDEABI_NDK" if value.len() > 0 => {
-                    let target = "armv7-linux-androideabi".to_string();
-                    let target = self.target_config.entry(target)
-                                     .or_insert(Target::default());
-                    target.ndk = Some(PathBuf::from(value));
-                }
-                "CFG_I686_LINUX_ANDROID_NDK" if value.len() > 0 => {
-                    let target = "i686-linux-android".to_string();
-                    let target = self.target_config.entry(target)
-                                     .or_insert(Target::default());
-                    target.ndk = Some(PathBuf::from(value));
-                }
-                "CFG_AARCH64_LINUX_ANDROID_NDK" if value.len() > 0 => {
-                    let target = "aarch64-linux-android".to_string();
-                    let target = self.target_config.entry(target)
-                                     .or_insert(Target::default());
-                    target.ndk = Some(PathBuf::from(value));
-                }
-                "CFG_LOCAL_RUST_ROOT" if value.len() > 0 => {
-                    self.rustc = Some(PathBuf::from(value).join("bin/rustc"));
-                    self.cargo = Some(PathBuf::from(value).join("bin/cargo"));
-                }
-                _ => {}
-            }
-        }
-    }
-}
-
-fn set<T>(field: &mut T, val: Option<T>) {
-    if let Some(v) = val {
-        *field = v;
-    }
-}
diff --git a/src/bootstrap/build/dist.rs b/src/bootstrap/build/dist.rs
deleted file mode 100644 (file)
index 6eed7ea..0000000
+++ /dev/null
@@ -1,319 +0,0 @@
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Implementation of the various distribution aspects of the compiler.
-//!
-//! This module is responsible for creating tarballs of the standard library,
-//! compiler, and documentation. This ends up being what we distribute to
-//! everyone as well.
-//!
-//! No tarball is actually created literally in this file, but rather we shell
-//! out to `rust-installer` still. This may one day be replaced with bits and
-//! pieces of `rustup.rs`!
-
-use std::fs::{self, File};
-use std::io::Write;
-use std::path::{PathBuf, Path};
-use std::process::Command;
-
-use build::{Build, Compiler};
-use build::util::{cp_r, libdir, is_dylib};
-
-fn package_vers(build: &Build) -> &str {
-    match &build.config.channel[..] {
-        "stable" => &build.release,
-        "beta" => "beta",
-        "nightly" => "nightly",
-        _ => &build.release,
-    }
-}
-
-fn distdir(build: &Build) -> PathBuf {
-    build.out.join("dist")
-}
-
-fn tmpdir(build: &Build) -> PathBuf {
-    build.out.join("tmp/dist")
-}
-
-/// Builds the `rust-docs` installer component.
-///
-/// Slurps up documentation from the `stage`'s `host`.
-pub fn docs(build: &Build, stage: u32, host: &str) {
-    println!("Dist docs stage{} ({})", stage, host);
-    let name = format!("rust-docs-{}", package_vers(build));
-    let image = tmpdir(build).join(format!("{}-{}-image", name, name));
-    let _ = fs::remove_dir_all(&image);
-
-    let dst = image.join("share/doc/rust/html");
-    t!(fs::create_dir_all(&dst));
-    let src = build.out.join(host).join("doc");
-    cp_r(&src, &dst);
-
-    let mut cmd = Command::new("sh");
-    cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
-       .arg("--product-name=Rust-Documentation")
-       .arg("--rel-manifest-dir=rustlib")
-       .arg("--success-message=Rust-documentation-is-installed.")
-       .arg(format!("--image-dir={}", sanitize_sh(&image)))
-       .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
-       .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
-       .arg(format!("--package-name={}-{}", name, host))
-       .arg("--component-name=rust-docs")
-       .arg("--legacy-manifest-dirs=rustlib,cargo")
-       .arg("--bulk-dirs=share/doc/rust/html");
-    build.run(&mut cmd);
-    t!(fs::remove_dir_all(&image));
-
-    // As part of this step, *also* copy the docs directory to a directory which
-    // buildbot typically uploads.
-    if host == build.config.build {
-        let dst = distdir(build).join("doc").join(&build.package_vers);
-        t!(fs::create_dir_all(&dst));
-        cp_r(&src, &dst);
-    }
-}
-
-/// Build the `rust-mingw` installer component.
-///
-/// This contains all the bits and pieces to run the MinGW Windows targets
-/// without any extra installed software (e.g. we bundle gcc, libraries, etc).
-/// Currently just shells out to a python script, but that should be rewritten
-/// in Rust.
-pub fn mingw(build: &Build, host: &str) {
-    println!("Dist mingw ({})", host);
-    let name = format!("rust-mingw-{}", package_vers(build));
-    let image = tmpdir(build).join(format!("{}-{}-image", name, host));
-    let _ = fs::remove_dir_all(&image);
-
-    // The first argument to the script is a "temporary directory" which is just
-    // thrown away (this contains the runtime DLLs included in the rustc package
-    // above) and the second argument is where to place all the MinGW components
-    // (which is what we want).
-    //
-    // FIXME: this script should be rewritten into Rust
-    let mut cmd = Command::new("python");
-    cmd.arg(build.src.join("src/etc/make-win-dist.py"))
-       .arg(tmpdir(build))
-       .arg(&image)
-       .arg(host);
-    build.run(&mut cmd);
-
-    let mut cmd = Command::new("sh");
-    cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
-       .arg("--product-name=Rust-MinGW")
-       .arg("--rel-manifest-dir=rustlib")
-       .arg("--success-message=Rust-MinGW-is-installed.")
-       .arg(format!("--image-dir={}", sanitize_sh(&image)))
-       .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
-       .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
-       .arg(format!("--package-name={}-{}", name, host))
-       .arg("--component-name=rust-mingw")
-       .arg("--legacy-manifest-dirs=rustlib,cargo");
-    build.run(&mut cmd);
-    t!(fs::remove_dir_all(&image));
-}
-
-/// Creates the `rustc` installer component.
-pub fn rustc(build: &Build, stage: u32, host: &str) {
-    println!("Dist rustc stage{} ({})", stage, host);
-    let name = format!("rustc-{}", package_vers(build));
-    let image = tmpdir(build).join(format!("{}-{}-image", name, host));
-    let _ = fs::remove_dir_all(&image);
-    let overlay = tmpdir(build).join(format!("{}-{}-overlay", name, host));
-    let _ = fs::remove_dir_all(&overlay);
-
-    // Prepare the rustc "image", what will actually end up getting installed
-    prepare_image(build, stage, host, &image);
-
-    // Prepare the overlay which is part of the tarball but won't actually be
-    // installed
-    let cp = |file: &str| {
-        install(&build.src.join(file), &overlay, 0o644);
-    };
-    cp("COPYRIGHT");
-    cp("LICENSE-APACHE");
-    cp("LICENSE-MIT");
-    cp("README.md");
-    // tiny morsel of metadata is used by rust-packaging
-    let version = &build.version;
-    t!(t!(File::create(overlay.join("version"))).write_all(version.as_bytes()));
-
-    // On MinGW we've got a few runtime DLL dependencies that we need to
-    // include. The first argument to this script is where to put these DLLs
-    // (the image we're creating), and the second argument is a junk directory
-    // to ignore all other MinGW stuff the script creates.
-    //
-    // On 32-bit MinGW we're always including a DLL which needs some extra
-    // licenses to distribute. On 64-bit MinGW we don't actually distribute
-    // anything requiring us to distribute a license, but it's likely the
-    // install will *also* include the rust-mingw package, which also needs
-    // licenses, so to be safe we just include it here in all MinGW packages.
-    //
-    // FIXME: this script should be rewritten into Rust
-    if host.contains("pc-windows-gnu") {
-        let mut cmd = Command::new("python");
-        cmd.arg(build.src.join("src/etc/make-win-dist.py"))
-           .arg(&image)
-           .arg(tmpdir(build))
-           .arg(host);
-        build.run(&mut cmd);
-
-        let dst = image.join("share/doc");
-        t!(fs::create_dir_all(&dst));
-        cp_r(&build.src.join("src/etc/third-party"), &dst);
-    }
-
-    // Finally, wrap everything up in a nice tarball!
-    let mut cmd = Command::new("sh");
-    cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
-       .arg("--product-name=Rust")
-       .arg("--rel-manifest-dir=rustlib")
-       .arg("--success-message=Rust-is-ready-to-roll.")
-       .arg(format!("--image-dir={}", sanitize_sh(&image)))
-       .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
-       .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
-       .arg(format!("--non-installed-overlay={}", sanitize_sh(&overlay)))
-       .arg(format!("--package-name={}-{}", name, host))
-       .arg("--component-name=rustc")
-       .arg("--legacy-manifest-dirs=rustlib,cargo");
-    build.run(&mut cmd);
-    t!(fs::remove_dir_all(&image));
-    t!(fs::remove_dir_all(&overlay));
-
-    fn prepare_image(build: &Build, stage: u32, host: &str, image: &Path) {
-        let src = build.sysroot(&Compiler::new(stage, host));
-        let libdir = libdir(host);
-
-        // Copy rustc/rustdoc binaries
-        t!(fs::create_dir_all(image.join("bin")));
-        cp_r(&src.join("bin"), &image.join("bin"));
-
-        // Copy runtime DLLs needed by the compiler
-        if libdir != "bin" {
-            for entry in t!(src.join(libdir).read_dir()).map(|e| t!(e)) {
-                let name = entry.file_name();
-                if let Some(s) = name.to_str() {
-                    if is_dylib(s) {
-                        install(&entry.path(), &image.join(libdir), 0o644);
-                    }
-                }
-            }
-        }
-
-        // Man pages
-        t!(fs::create_dir_all(image.join("share/man/man1")));
-        cp_r(&build.src.join("man"), &image.join("share/man/man1"));
-
-        // Debugger scripts
-        debugger_scripts(build, &image, host);
-
-        // Misc license info
-        let cp = |file: &str| {
-            install(&build.src.join(file), &image.join("share/doc/rust"), 0o644);
-        };
-        cp("COPYRIGHT");
-        cp("LICENSE-APACHE");
-        cp("LICENSE-MIT");
-        cp("README.md");
-    }
-}
-
-/// Copies debugger scripts for `host` into the `sysroot` specified.
-pub fn debugger_scripts(build: &Build,
-                        sysroot: &Path,
-                        host: &str) {
-    let cp_debugger_script = |file: &str| {
-        let dst = sysroot.join("lib/rustlib/etc");
-        t!(fs::create_dir_all(&dst));
-        install(&build.src.join("src/etc/").join(file), &dst, 0o644);
-    };
-    if host.contains("windows-msvc") {
-        // no debugger scripts
-    } else {
-        cp_debugger_script("debugger_pretty_printers_common.py");
-
-        // gdb debugger scripts
-        install(&build.src.join("src/etc/rust-gdb"), &sysroot.join("bin"),
-                0o755);
-
-        cp_debugger_script("gdb_load_rust_pretty_printers.py");
-        cp_debugger_script("gdb_rust_pretty_printing.py");
-
-        // lldb debugger scripts
-        install(&build.src.join("src/etc/rust-lldb"), &sysroot.join("bin"),
-                0o755);
-
-        cp_debugger_script("lldb_rust_formatters.py");
-    }
-}
-
-/// Creates the `rust-std` installer component as compiled by `compiler` for the
-/// target `target`.
-pub fn std(build: &Build, compiler: &Compiler, target: &str) {
-    println!("Dist std stage{} ({} -> {})", compiler.stage, compiler.host,
-             target);
-    let name = format!("rust-std-{}", package_vers(build));
-    let image = tmpdir(build).join(format!("{}-{}-image", name, target));
-    let _ = fs::remove_dir_all(&image);
-
-    let dst = image.join("lib/rustlib").join(target);
-    t!(fs::create_dir_all(&dst));
-    let src = build.sysroot(compiler).join("lib/rustlib");
-    cp_r(&src.join(target), &dst);
-
-    let mut cmd = Command::new("sh");
-    cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
-       .arg("--product-name=Rust")
-       .arg("--rel-manifest-dir=rustlib")
-       .arg("--success-message=std-is-standing-at-the-ready.")
-       .arg(format!("--image-dir={}", sanitize_sh(&image)))
-       .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
-       .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
-       .arg(format!("--package-name={}-{}", name, target))
-       .arg(format!("--component-name=rust-std-{}", target))
-       .arg("--legacy-manifest-dirs=rustlib,cargo");
-    build.run(&mut cmd);
-    t!(fs::remove_dir_all(&image));
-}
-
-fn install(src: &Path, dstdir: &Path, perms: u32) {
-    let dst = dstdir.join(src.file_name().unwrap());
-    t!(fs::create_dir_all(dstdir));
-    t!(fs::copy(src, &dst));
-    chmod(&dst, perms);
-}
-
-#[cfg(unix)]
-fn chmod(path: &Path, perms: u32) {
-    use std::os::unix::fs::*;
-    t!(fs::set_permissions(path, fs::Permissions::from_mode(perms)));
-}
-#[cfg(windows)]
-fn chmod(_path: &Path, _perms: u32) {}
-
-// We have to run a few shell scripts, which choke quite a bit on both `\`
-// characters and on `C:\` paths, so normalize both of them away.
-fn sanitize_sh(path: &Path) -> String {
-    let path = path.to_str().unwrap().replace("\\", "/");
-    return change_drive(&path).unwrap_or(path);
-
-    fn change_drive(s: &str) -> Option<String> {
-        let mut ch = s.chars();
-        let drive = ch.next().unwrap_or('C');
-        if ch.next() != Some(':') {
-            return None
-        }
-        if ch.next() != Some('/') {
-            return None
-        }
-        Some(format!("/{}/{}", drive, &s[drive.len_utf8() + 2..]))
-    }
-}
diff --git a/src/bootstrap/build/doc.rs b/src/bootstrap/build/doc.rs
deleted file mode 100644 (file)
index f7cc742..0000000
+++ /dev/null
@@ -1,207 +0,0 @@
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Documentation generation for rustbuild.
-//!
-//! This module implements generation for all bits and pieces of documentation
-//! for the Rust project. This notably includes suites like the rust book, the
-//! nomicon, standalone documentation, etc.
-//!
-//! Everything here is basically just a shim around calling either `rustbook` or
-//! `rustdoc`.
-
-use std::fs::{self, File};
-use std::io::prelude::*;
-use std::path::Path;
-use std::process::Command;
-
-use build::{Build, Compiler, Mode};
-use build::util::{up_to_date, cp_r};
-
-/// Invoke `rustbook` as compiled in `stage` for `target` for the doc book
-/// `name` into the `out` path.
-///
-/// This will not actually generate any documentation if the documentation has
-/// already been generated.
-pub fn rustbook(build: &Build, stage: u32, target: &str, name: &str, out: &Path) {
-    t!(fs::create_dir_all(out));
-
-    let out = out.join(name);
-    let compiler = Compiler::new(stage, &build.config.build);
-    let src = build.src.join("src/doc").join(name);
-    let index = out.join("index.html");
-    let rustbook = build.tool(&compiler, "rustbook");
-    if up_to_date(&src, &index) && up_to_date(&rustbook, &index) {
-        return
-    }
-    println!("Rustbook stage{} ({}) - {}", stage, target, name);
-    let _ = fs::remove_dir_all(&out);
-    build.run(build.tool_cmd(&compiler, "rustbook")
-                   .arg("build")
-                   .arg(&src)
-                   .arg(out));
-}
-
-/// Generates all standalone documentation as compiled by the rustdoc in `stage`
-/// for the `target` into `out`.
-///
-/// This will list all of `src/doc` looking for markdown files and appropriately
-/// perform transformations like substituting `VERSION`, `SHORT_HASH`, and
-/// `STAMP` alongw ith providing the various header/footer HTML we've cutomized.
-///
-/// In the end, this is just a glorified wrapper around rustdoc!
-pub fn standalone(build: &Build, stage: u32, target: &str, out: &Path) {
-    println!("Documenting stage{} standalone ({})", stage, target);
-    t!(fs::create_dir_all(out));
-
-    let compiler = Compiler::new(stage, &build.config.build);
-
-    let favicon = build.src.join("src/doc/favicon.inc");
-    let footer = build.src.join("src/doc/footer.inc");
-    let full_toc = build.src.join("src/doc/full-toc.inc");
-    t!(fs::copy(build.src.join("src/doc/rust.css"), out.join("rust.css")));
-
-    let version_input = build.src.join("src/doc/version_info.html.template");
-    let version_info = out.join("version_info.html");
-
-    if !up_to_date(&version_input, &version_info) {
-        let mut info = String::new();
-        t!(t!(File::open(&version_input)).read_to_string(&mut info));
-        let blank = String::new();
-        let short = build.short_ver_hash.as_ref().unwrap_or(&blank);
-        let hash = build.ver_hash.as_ref().unwrap_or(&blank);
-        let info = info.replace("VERSION", &build.release)
-                       .replace("SHORT_HASH", short)
-                       .replace("STAMP", hash);
-        t!(t!(File::create(&version_info)).write_all(info.as_bytes()));
-    }
-
-    for file in t!(fs::read_dir(build.src.join("src/doc"))) {
-        let file = t!(file);
-        let path = file.path();
-        let filename = path.file_name().unwrap().to_str().unwrap();
-        if !filename.ends_with(".md") || filename == "README.md" {
-            continue
-        }
-
-        let html = out.join(filename).with_extension("html");
-        let rustdoc = build.rustdoc(&compiler);
-        if up_to_date(&path, &html) &&
-           up_to_date(&footer, &html) &&
-           up_to_date(&favicon, &html) &&
-           up_to_date(&full_toc, &html) &&
-           up_to_date(&version_info, &html) &&
-           up_to_date(&rustdoc, &html) {
-            continue
-        }
-
-        let mut cmd = Command::new(&rustdoc);
-        build.add_rustc_lib_path(&compiler, &mut cmd);
-        cmd.arg("--html-after-content").arg(&footer)
-           .arg("--html-before-content").arg(&version_info)
-           .arg("--html-in-header").arg(&favicon)
-           .arg("--markdown-playground-url")
-           .arg("https://play.rust-lang.org/")
-           .arg("-o").arg(out)
-           .arg(&path);
-
-        if filename == "reference.md" {
-           cmd.arg("--html-in-header").arg(&full_toc);
-        }
-
-        if filename == "not_found.md" {
-            cmd.arg("--markdown-no-toc")
-               .arg("--markdown-css")
-               .arg("https://doc.rust-lang.org/rust.css");
-        } else {
-            cmd.arg("--markdown-css").arg("rust.css");
-        }
-        build.run(&mut cmd);
-    }
-}
-
-/// Compile all standard library documentation.
-///
-/// This will generate all documentation for the standard library and its
-/// dependencies. This is largely just a wrapper around `cargo doc`.
-pub fn std(build: &Build, stage: u32, target: &str, out: &Path) {
-    println!("Documenting stage{} std ({})", stage, target);
-    t!(fs::create_dir_all(out));
-    let compiler = Compiler::new(stage, &build.config.build);
-    let out_dir = build.stage_out(&compiler, Mode::Libstd)
-                       .join(target).join("doc");
-    let rustdoc = build.rustdoc(&compiler);
-
-    build.clear_if_dirty(&out_dir, &rustdoc);
-
-    let mut cargo = build.cargo(&compiler, Mode::Libstd, target, "doc");
-    cargo.arg("--manifest-path")
-         .arg(build.src.join("src/rustc/std_shim/Cargo.toml"))
-         .arg("--features").arg(build.std_features());
-    build.run(&mut cargo);
-    cp_r(&out_dir, out)
-}
-
-/// Compile all libtest documentation.
-///
-/// This will generate all documentation for libtest and its dependencies. This
-/// is largely just a wrapper around `cargo doc`.
-pub fn test(build: &Build, stage: u32, target: &str, out: &Path) {
-    println!("Documenting stage{} test ({})", stage, target);
-    let compiler = Compiler::new(stage, &build.config.build);
-    let out_dir = build.stage_out(&compiler, Mode::Libtest)
-                       .join(target).join("doc");
-    let rustdoc = build.rustdoc(&compiler);
-
-    build.clear_if_dirty(&out_dir, &rustdoc);
-
-    let mut cargo = build.cargo(&compiler, Mode::Libtest, target, "doc");
-    cargo.arg("--manifest-path")
-         .arg(build.src.join("src/rustc/test_shim/Cargo.toml"));
-    build.run(&mut cargo);
-    cp_r(&out_dir, out)
-}
-
-/// Generate all compiler documentation.
-///
-/// This will generate all documentation for the compiler libraries and their
-/// dependencies. This is largely just a wrapper around `cargo doc`.
-pub fn rustc(build: &Build, stage: u32, target: &str, out: &Path) {
-    println!("Documenting stage{} compiler ({})", stage, target);
-    let compiler = Compiler::new(stage, &build.config.build);
-    let out_dir = build.stage_out(&compiler, Mode::Librustc)
-                       .join(target).join("doc");
-    let rustdoc = build.rustdoc(&compiler);
-    if !up_to_date(&rustdoc, &out_dir.join("rustc/index.html")) {
-        t!(fs::remove_dir_all(&out_dir));
-    }
-    let mut cargo = build.cargo(&compiler, Mode::Librustc, target, "doc");
-    cargo.arg("--manifest-path")
-         .arg(build.src.join("src/rustc/Cargo.toml"))
-         .arg("--features").arg(build.rustc_features());
-    build.run(&mut cargo);
-    cp_r(&out_dir, out)
-}
-
-/// Generates the HTML rendered error-index by running the
-/// `error_index_generator` tool.
-pub fn error_index(build: &Build, stage: u32, target: &str, out: &Path) {
-    println!("Documenting stage{} error index ({})", stage, target);
-    t!(fs::create_dir_all(out));
-    let compiler = Compiler::new(stage, &build.config.build);
-    let mut index = build.tool_cmd(&compiler, "error_index_generator");
-    index.arg("html");
-    index.arg(out.join("error-index.html"));
-
-    // FIXME: shouldn't have to pass this env var
-    index.env("CFG_BUILD", &build.config.build);
-
-    build.run(&mut index);
-}
diff --git a/src/bootstrap/build/flags.rs b/src/bootstrap/build/flags.rs
deleted file mode 100644 (file)
index d925997..0000000
+++ /dev/null
@@ -1,103 +0,0 @@
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Command-line interface of the rustbuild build system.
-//!
-//! This module implements the command-line parsing of the build system which
-//! has various flags to configure how it's run.
-
-use std::fs;
-use std::path::PathBuf;
-use std::process;
-use std::slice;
-
-use getopts::Options;
-
-/// Deserialized version of all flags for this compile.
-pub struct Flags {
-    pub verbose: bool,
-    pub stage: Option<u32>,
-    pub build: String,
-    pub host: Filter,
-    pub target: Filter,
-    pub step: Vec<String>,
-    pub config: Option<PathBuf>,
-    pub src: Option<PathBuf>,
-    pub jobs: Option<u32>,
-    pub args: Vec<String>,
-    pub clean: bool,
-}
-
-pub struct Filter {
-    values: Vec<String>,
-}
-
-impl Flags {
-    pub fn parse(args: &[String]) -> Flags {
-        let mut opts = Options::new();
-        opts.optflag("v", "verbose", "use verbose output");
-        opts.optopt("", "config", "TOML configuration file for build", "FILE");
-        opts.optmulti("", "host", "host targets to build", "HOST");
-        opts.reqopt("", "build", "build target of the stage0 compiler", "BUILD");
-        opts.optmulti("", "target", "targets to build", "TARGET");
-        opts.optmulti("s", "step", "build step to execute", "STEP");
-        opts.optopt("", "stage", "stage to build", "N");
-        opts.optopt("", "src", "path to repo root", "DIR");
-        opts.optopt("j", "jobs", "number of jobs to run in parallel", "JOBS");
-        opts.optflag("", "clean", "clean output directory");
-        opts.optflag("h", "help", "print this help message");
-
-        let usage = |n| -> ! {
-            let brief = format!("Usage: rust.py [options]");
-            print!("{}", opts.usage(&brief));
-            process::exit(n);
-        };
-
-        let m = opts.parse(args).unwrap_or_else(|e| {
-            println!("failed to parse options: {}", e);
-            usage(1);
-        });
-        if m.opt_present("h") {
-            usage(0);
-        }
-
-        let cfg_file = m.opt_str("config").map(PathBuf::from).or_else(|| {
-            if fs::metadata("config.toml").is_ok() {
-                Some(PathBuf::from("config.toml"))
-            } else {
-                None
-            }
-        });
-
-        Flags {
-            verbose: m.opt_present("v"),
-            clean: m.opt_present("clean"),
-            stage: m.opt_str("stage").map(|j| j.parse().unwrap()),
-            build: m.opt_str("build").unwrap(),
-            host: Filter { values: m.opt_strs("host") },
-            target: Filter { values: m.opt_strs("target") },
-            step: m.opt_strs("step"),
-            config: cfg_file,
-            src: m.opt_str("src").map(PathBuf::from),
-            jobs: m.opt_str("jobs").map(|j| j.parse().unwrap()),
-            args: m.free.clone(),
-        }
-    }
-}
-
-impl Filter {
-    pub fn contains(&self, name: &str) -> bool {
-        self.values.len() == 0 || self.values.iter().any(|s| s == name)
-    }
-
-    pub fn iter(&self) -> slice::Iter<String> {
-        self.values.iter()
-    }
-}
diff --git a/src/bootstrap/build/job.rs b/src/bootstrap/build/job.rs
deleted file mode 100644 (file)
index 4558e6f..0000000
+++ /dev/null
@@ -1,111 +0,0 @@
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Job management on Windows for bootstrapping
-//!
-//! Most of the time when you're running a build system (e.g. make) you expect
-//! Ctrl-C or abnormal termination to actually terminate the entire tree of
-//! process in play, not just the one at the top. This currently works "by
-//! default" on Unix platforms because Ctrl-C actually sends a signal to the
-//! *process group* rather than the parent process, so everything will get torn
-//! down. On Windows, however, this does not happen and Ctrl-C just kills the
-//! parent process.
-//!
-//! To achieve the same semantics on Windows we use Job Objects to ensure that
-//! all processes die at the same time. Job objects have a mode of operation
-//! where when all handles to the object are closed it causes all child
-//! processes associated with the object to be terminated immediately.
-//! Conveniently whenever a process in the job object spawns a new process the
-//! child will be associated with the job object as well. This means if we add
-//! ourselves to the job object we create then everything will get torn down!
-//!
-//! Unfortunately most of the time the build system is actually called from a
-//! python wrapper (which manages things like building the build system) so this
-//! all doesn't quite cut it so far. To go the last mile we duplicate the job
-//! object handle into our parent process (a python process probably) and then
-//! close our own handle. This means that the only handle to the job object
-//! resides in the parent python process, so when python dies the whole build
-//! system dies (as one would probably expect!).
-//!
-//! Note that this module has a #[cfg(windows)] above it as none of this logic
-//! is required on Unix.
-
-extern crate kernel32;
-extern crate winapi;
-
-use std::env;
-use std::io;
-use std::mem;
-
-use self::winapi::*;
-use self::kernel32::*;
-
-pub unsafe fn setup() {
-    // Create a new job object for us to use
-    let job = CreateJobObjectW(0 as *mut _, 0 as *const _);
-    assert!(job != 0 as *mut _, "{}", io::Error::last_os_error());
-
-    // Indicate that when all handles to the job object are gone that all
-    // process in the object should be killed. Note that this includes our
-    // entire process tree by default because we've added ourselves and our
-    // children will reside in the job by default.
-    let mut info = mem::zeroed::<JOBOBJECT_EXTENDED_LIMIT_INFORMATION>();
-    info.BasicLimitInformation.LimitFlags = JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE;
-    let r = SetInformationJobObject(job,
-                                    JobObjectExtendedLimitInformation,
-                                    &mut info as *mut _ as LPVOID,
-                                    mem::size_of_val(&info) as DWORD);
-    assert!(r != 0, "{}", io::Error::last_os_error());
-
-    // Assign our process to this job object. Note that if this fails, one very
-    // likely reason is that we are ourselves already in a job object! This can
-    // happen on the build bots that we've got for Windows, or if just anyone
-    // else is instrumenting the build. In this case we just bail out
-    // immediately and assume that they take care of it.
-    //
-    // Also note that nested jobs (why this might fail) are supported in recent
-    // versions of Windows, but the version of Windows that our bots are running
-    // at least don't support nested job objects.
-    let r = AssignProcessToJobObject(job, GetCurrentProcess());
-    if r == 0 {
-        CloseHandle(job);
-        return
-    }
-
-    // If we've got a parent process (e.g. the python script that called us)
-    // then move ownership of this job object up to them. That way if the python
-    // script is killed (e.g. via ctrl-c) then we'll all be torn down.
-    //
-    // If we don't have a parent (e.g. this was run directly) then we
-    // intentionally leak the job object handle. When our process exits
-    // (normally or abnormally) it will close the handle implicitly, causing all
-    // processes in the job to be cleaned up.
-    let pid = match env::var("BOOTSTRAP_PARENT_ID") {
-        Ok(s) => s,
-        Err(..) => return,
-    };
-
-    let parent = OpenProcess(PROCESS_DUP_HANDLE, FALSE, pid.parse().unwrap());
-    assert!(parent != 0 as *mut _, "{}", io::Error::last_os_error());
-    let mut parent_handle = 0 as *mut _;
-    let r = DuplicateHandle(GetCurrentProcess(), job,
-                            parent, &mut parent_handle,
-                            0, FALSE, DUPLICATE_SAME_ACCESS);
-
-    // If this failed, well at least we tried! An example of DuplicateHandle
-    // failing in the past has been when the wrong python2 package spawed this
-    // build system (e.g. the `python2` package in MSYS instead of
-    // `mingw-w64-x86_64-python2`. Not sure why it failed, but the "failure
-    // mode" here is that we only clean everything up when the build system
-    // dies, not when the python parent does, so not too bad.
-    if r != 0 {
-        CloseHandle(job);
-    }
-}
diff --git a/src/bootstrap/build/mod.rs b/src/bootstrap/build/mod.rs
deleted file mode 100644 (file)
index 195d1bc..0000000
+++ /dev/null
@@ -1,871 +0,0 @@
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Implementation of rustbuild, the Rust build system.
-//!
-//! This module, and its descendants, are the implementation of the Rust build
-//! system. Most of this build system is backed by Cargo but the outer layer
-//! here serves as the ability to orchestrate calling Cargo, sequencing Cargo
-//! builds, building artifacts like LLVM, etc.
-//!
-//! More documentation can be found in each respective module below.
-
-use std::cell::RefCell;
-use std::collections::HashMap;
-use std::env;
-use std::fs::{self, File};
-use std::path::{PathBuf, Path};
-use std::process::Command;
-
-use build_helper::{run_silent, output};
-use gcc;
-use num_cpus;
-
-use build::util::{exe, mtime, libdir, add_lib_path};
-
-/// A helper macro to `unwrap` a result except also print out details like:
-///
-/// * The file/line of the panic
-/// * The expression that failed
-/// * The error itself
-///
-/// This is currently used judiciously throughout the build system rather than
-/// using a `Result` with `try!`, but this may change on day...
-macro_rules! t {
-    ($e:expr) => (match $e {
-        Ok(e) => e,
-        Err(e) => panic!("{} failed with {}", stringify!($e), e),
-    })
-}
-
-mod cc;
-mod channel;
-mod check;
-mod clean;
-mod compile;
-mod config;
-mod dist;
-mod doc;
-mod flags;
-mod native;
-mod sanity;
-mod step;
-mod util;
-
-#[cfg(windows)]
-mod job;
-
-#[cfg(not(windows))]
-mod job {
-    pub unsafe fn setup() {}
-}
-
-pub use build::config::Config;
-pub use build::flags::Flags;
-
-/// A structure representing a Rust compiler.
-///
-/// Each compiler has a `stage` that it is associated with and a `host` that
-/// corresponds to the platform the compiler runs on. This structure is used as
-/// a parameter to many methods below.
-#[derive(Eq, PartialEq, Clone, Copy, Hash, Debug)]
-pub struct Compiler<'a> {
-    stage: u32,
-    host: &'a str,
-}
-
-/// Global configuration for the build system.
-///
-/// This structure transitively contains all configuration for the build system.
-/// All filesystem-encoded configuration is in `config`, all flags are in
-/// `flags`, and then parsed or probed information is listed in the keys below.
-///
-/// This structure is a parameter of almost all methods in the build system,
-/// although most functions are implemented as free functions rather than
-/// methods specifically on this structure itself (to make it easier to
-/// organize).
-pub struct Build {
-    // User-specified configuration via config.toml
-    config: Config,
-
-    // User-specified configuration via CLI flags
-    flags: Flags,
-
-    // Derived properties from the above two configurations
-    cargo: PathBuf,
-    rustc: PathBuf,
-    src: PathBuf,
-    out: PathBuf,
-    release: String,
-    unstable_features: bool,
-    ver_hash: Option<String>,
-    short_ver_hash: Option<String>,
-    ver_date: Option<String>,
-    version: String,
-    package_vers: String,
-    bootstrap_key: String,
-    bootstrap_key_stage0: String,
-
-    // Probed tools at runtime
-    gdb_version: Option<String>,
-    lldb_version: Option<String>,
-    lldb_python_dir: Option<String>,
-
-    // Runtime state filled in later on
-    cc: HashMap<String, (gcc::Tool, Option<PathBuf>)>,
-    cxx: HashMap<String, gcc::Tool>,
-    compiler_rt_built: RefCell<HashMap<String, PathBuf>>,
-}
-
-/// The various "modes" of invoking Cargo.
-///
-/// These entries currently correspond to the various output directories of the
-/// build system, with each mod generating output in a different directory.
-#[derive(Clone, Copy)]
-pub enum Mode {
-    /// This cargo is going to build the standard library, placing output in the
-    /// "stageN-std" directory.
-    Libstd,
-
-    /// This cargo is going to build libtest, placing output in the
-    /// "stageN-test" directory.
-    Libtest,
-
-    /// This cargo is going to build librustc and compiler libraries, placing
-    /// output in the "stageN-rustc" directory.
-    Librustc,
-
-    /// This cargo is going to some build tool, placing output in the
-    /// "stageN-tools" directory.
-    Tool,
-}
-
-impl Build {
-    /// Creates a new set of build configuration from the `flags` on the command
-    /// line and the filesystem `config`.
-    ///
-    /// By default all build output will be placed in the current directory.
-    pub fn new(flags: Flags, config: Config) -> Build {
-        let cwd = t!(env::current_dir());
-        let src = flags.src.clone().unwrap_or(cwd.clone());
-        let out = cwd.join("build");
-
-        let stage0_root = out.join(&config.build).join("stage0/bin");
-        let rustc = match config.rustc {
-            Some(ref s) => PathBuf::from(s),
-            None => stage0_root.join(exe("rustc", &config.build)),
-        };
-        let cargo = match config.cargo {
-            Some(ref s) => PathBuf::from(s),
-            None => stage0_root.join(exe("cargo", &config.build)),
-        };
-
-        Build {
-            flags: flags,
-            config: config,
-            cargo: cargo,
-            rustc: rustc,
-            src: src,
-            out: out,
-
-            release: String::new(),
-            unstable_features: false,
-            ver_hash: None,
-            short_ver_hash: None,
-            ver_date: None,
-            version: String::new(),
-            bootstrap_key: String::new(),
-            bootstrap_key_stage0: String::new(),
-            package_vers: String::new(),
-            cc: HashMap::new(),
-            cxx: HashMap::new(),
-            compiler_rt_built: RefCell::new(HashMap::new()),
-            gdb_version: None,
-            lldb_version: None,
-            lldb_python_dir: None,
-        }
-    }
-
-    /// Executes the entire build, as configured by the flags and configuration.
-    pub fn build(&mut self) {
-        use build::step::Source::*;
-
-        unsafe {
-            job::setup();
-        }
-
-        if self.flags.clean {
-            return clean::clean(self);
-        }
-
-        self.verbose("finding compilers");
-        cc::find(self);
-        self.verbose("running sanity check");
-        sanity::check(self);
-        self.verbose("collecting channel variables");
-        channel::collect(self);
-        self.verbose("updating submodules");
-        self.update_submodules();
-
-        // The main loop of the build system.
-        //
-        // The `step::all` function returns a topographically sorted list of all
-        // steps that need to be executed as part of this build. Each step has a
-        // corresponding entry in `step.rs` and indicates some unit of work that
-        // needs to be done as part of the build.
-        //
-        // Almost all of these are simple one-liners that shell out to the
-        // corresponding functionality in the extra modules, where more
-        // documentation can be found.
-        for target in step::all(self) {
-            let doc_out = self.out.join(&target.target).join("doc");
-            match target.src {
-                Llvm { _dummy } => {
-                    native::llvm(self, target.target);
-                }
-                CompilerRt { _dummy } => {
-                    native::compiler_rt(self, target.target);
-                }
-                TestHelpers { _dummy } => {
-                    native::test_helpers(self, target.target);
-                }
-                Libstd { compiler } => {
-                    compile::std(self, target.target, &compiler);
-                }
-                Libtest { compiler } => {
-                    compile::test(self, target.target, &compiler);
-                }
-                Librustc { compiler } => {
-                    compile::rustc(self, target.target, &compiler);
-                }
-                LibstdLink { compiler, host } => {
-                    compile::std_link(self, target.target, &compiler, host);
-                }
-                LibtestLink { compiler, host } => {
-                    compile::test_link(self, target.target, &compiler, host);
-                }
-                LibrustcLink { compiler, host } => {
-                    compile::rustc_link(self, target.target, &compiler, host);
-                }
-                Rustc { stage: 0 } => {
-                    // nothing to do...
-                }
-                Rustc { stage } => {
-                    compile::assemble_rustc(self, stage, target.target);
-                }
-                ToolLinkchecker { stage } => {
-                    compile::tool(self, stage, target.target, "linkchecker");
-                }
-                ToolRustbook { stage } => {
-                    compile::tool(self, stage, target.target, "rustbook");
-                }
-                ToolErrorIndex { stage } => {
-                    compile::tool(self, stage, target.target,
-                                  "error_index_generator");
-                }
-                ToolCargoTest { stage } => {
-                    compile::tool(self, stage, target.target, "cargotest");
-                }
-                ToolTidy { stage } => {
-                    compile::tool(self, stage, target.target, "tidy");
-                }
-                ToolCompiletest { stage } => {
-                    compile::tool(self, stage, target.target, "compiletest");
-                }
-                DocBook { stage } => {
-                    doc::rustbook(self, stage, target.target, "book", &doc_out);
-                }
-                DocNomicon { stage } => {
-                    doc::rustbook(self, stage, target.target, "nomicon",
-                                  &doc_out);
-                }
-                DocStyle { stage } => {
-                    doc::rustbook(self, stage, target.target, "style",
-                                  &doc_out);
-                }
-                DocStandalone { stage } => {
-                    doc::standalone(self, stage, target.target, &doc_out);
-                }
-                DocStd { stage } => {
-                    doc::std(self, stage, target.target, &doc_out);
-                }
-                DocTest { stage } => {
-                    doc::test(self, stage, target.target, &doc_out);
-                }
-                DocRustc { stage } => {
-                    doc::rustc(self, stage, target.target, &doc_out);
-                }
-                DocErrorIndex { stage } => {
-                    doc::error_index(self, stage, target.target, &doc_out);
-                }
-
-                CheckLinkcheck { stage } => {
-                    check::linkcheck(self, stage, target.target);
-                }
-                CheckCargoTest { stage } => {
-                    check::cargotest(self, stage, target.target);
-                }
-                CheckTidy { stage } => {
-                    check::tidy(self, stage, target.target);
-                }
-                CheckRPass { compiler } => {
-                    check::compiletest(self, &compiler, target.target,
-                                       "run-pass", "run-pass");
-                }
-                CheckRPassFull { compiler } => {
-                    check::compiletest(self, &compiler, target.target,
-                                       "run-pass", "run-pass-fulldeps");
-                }
-                CheckCFail { compiler } => {
-                    check::compiletest(self, &compiler, target.target,
-                                       "compile-fail", "compile-fail");
-                }
-                CheckCFailFull { compiler } => {
-                    check::compiletest(self, &compiler, target.target,
-                                       "compile-fail", "compile-fail-fulldeps")
-                }
-                CheckPFail { compiler } => {
-                    check::compiletest(self, &compiler, target.target,
-                                       "parse-fail", "parse-fail");
-                }
-                CheckRFail { compiler } => {
-                    check::compiletest(self, &compiler, target.target,
-                                       "run-fail", "run-fail");
-                }
-                CheckRFailFull { compiler } => {
-                    check::compiletest(self, &compiler, target.target,
-                                       "run-fail", "run-fail-fulldeps");
-                }
-                CheckPretty { compiler } => {
-                    check::compiletest(self, &compiler, target.target,
-                                       "pretty", "pretty");
-                }
-                CheckPrettyRPass { compiler } => {
-                    check::compiletest(self, &compiler, target.target,
-                                       "pretty", "run-pass");
-                }
-                CheckPrettyRPassFull { compiler } => {
-                    check::compiletest(self, &compiler, target.target,
-                                       "pretty", "run-pass-fulldeps");
-                }
-                CheckPrettyRFail { compiler } => {
-                    check::compiletest(self, &compiler, target.target,
-                                       "pretty", "run-fail");
-                }
-                CheckPrettyRFailFull { compiler } => {
-                    check::compiletest(self, &compiler, target.target,
-                                       "pretty", "run-fail-fulldeps");
-                }
-                CheckPrettyRPassValgrind { compiler } => {
-                    check::compiletest(self, &compiler, target.target,
-                                       "pretty", "run-pass-valgrind");
-                }
-                CheckCodegen { compiler } => {
-                    check::compiletest(self, &compiler, target.target,
-                                       "codegen", "codegen");
-                }
-                CheckCodegenUnits { compiler } => {
-                    check::compiletest(self, &compiler, target.target,
-                                       "codegen-units", "codegen-units");
-                }
-                CheckIncremental { compiler } => {
-                    check::compiletest(self, &compiler, target.target,
-                                       "incremental", "incremental");
-                }
-                CheckUi { compiler } => {
-                    check::compiletest(self, &compiler, target.target,
-                                       "ui", "ui");
-                }
-                CheckDebuginfo { compiler } => {
-                    if target.target.contains("msvc") {
-                        // nothing to do
-                    } else if target.target.contains("apple") {
-                        check::compiletest(self, &compiler, target.target,
-                                           "debuginfo-lldb", "debuginfo");
-                    } else {
-                        check::compiletest(self, &compiler, target.target,
-                                           "debuginfo-gdb", "debuginfo");
-                    }
-                }
-                CheckRustdoc { compiler } => {
-                    check::compiletest(self, &compiler, target.target,
-                                       "rustdoc", "rustdoc");
-                }
-                CheckRPassValgrind { compiler } => {
-                    check::compiletest(self, &compiler, target.target,
-                                       "run-pass-valgrind", "run-pass-valgrind");
-                }
-                CheckDocs { compiler } => {
-                    check::docs(self, &compiler);
-                }
-                CheckErrorIndex { compiler } => {
-                    check::error_index(self, &compiler);
-                }
-                CheckRMake { compiler } => {
-                    check::compiletest(self, &compiler, target.target,
-                                       "run-make", "run-make")
-                }
-                CheckCrateStd { compiler } => {
-                    check::krate(self, &compiler, target.target, Mode::Libstd)
-                }
-                CheckCrateTest { compiler } => {
-                    check::krate(self, &compiler, target.target, Mode::Libtest)
-                }
-                CheckCrateRustc { compiler } => {
-                    check::krate(self, &compiler, target.target, Mode::Librustc)
-                }
-
-                DistDocs { stage } => dist::docs(self, stage, target.target),
-                DistMingw { _dummy } => dist::mingw(self, target.target),
-                DistRustc { stage } => dist::rustc(self, stage, target.target),
-                DistStd { compiler } => dist::std(self, &compiler, target.target),
-
-                DebuggerScripts { stage } => {
-                    let compiler = Compiler::new(stage, target.target);
-                    dist::debugger_scripts(self,
-                                           &self.sysroot(&compiler),
-                                           target.target);
-                }
-
-                AndroidCopyLibs { compiler } => {
-                    check::android_copy_libs(self, &compiler, target.target);
-                }
-
-                // pseudo-steps
-                Dist { .. } |
-                Doc { .. } |
-                CheckTarget { .. } |
-                Check { .. } => {}
-            }
-        }
-    }
-
-    /// Updates all git submodules that we have.
-    ///
-    /// This will detect if any submodules are out of date an run the necessary
-    /// commands to sync them all with upstream.
-    fn update_submodules(&self) {
-        if !self.config.submodules {
-            return
-        }
-        if fs::metadata(self.src.join(".git")).is_err() {
-            return
-        }
-        let git_submodule = || {
-            let mut cmd = Command::new("git");
-            cmd.current_dir(&self.src).arg("submodule");
-            return cmd
-        };
-
-        // FIXME: this takes a seriously long time to execute on Windows and a
-        //        nontrivial amount of time on Unix, we should have a better way
-        //        of detecting whether we need to run all the submodule commands
-        //        below.
-        let out = output(git_submodule().arg("status"));
-        if !out.lines().any(|l| l.starts_with("+") || l.starts_with("-")) {
-            return
-        }
-
-        self.run(git_submodule().arg("sync"));
-        self.run(git_submodule().arg("init"));
-        self.run(git_submodule().arg("update"));
-        self.run(git_submodule().arg("update").arg("--recursive"));
-        self.run(git_submodule().arg("status").arg("--recursive"));
-        self.run(git_submodule().arg("foreach").arg("--recursive")
-                                .arg("git").arg("clean").arg("-fdx"));
-        self.run(git_submodule().arg("foreach").arg("--recursive")
-                                .arg("git").arg("checkout").arg("."));
-    }
-
-    /// Clear out `dir` if `input` is newer.
-    ///
-    /// After this executes, it will also ensure that `dir` exists.
-    fn clear_if_dirty(&self, dir: &Path, input: &Path) {
-        let stamp = dir.join(".stamp");
-        if mtime(&stamp) < mtime(input) {
-            self.verbose(&format!("Dirty - {}", dir.display()));
-            let _ = fs::remove_dir_all(dir);
-        }
-        t!(fs::create_dir_all(dir));
-        t!(File::create(stamp));
-    }
-
-    /// Prepares an invocation of `cargo` to be run.
-    ///
-    /// This will create a `Command` that represents a pending execution of
-    /// Cargo. This cargo will be configured to use `compiler` as the actual
-    /// rustc compiler, its output will be scoped by `mode`'s output directory,
-    /// it will pass the `--target` flag for the specified `target`, and will be
-    /// executing the Cargo command `cmd`.
-    fn cargo(&self,
-             compiler: &Compiler,
-             mode: Mode,
-             target: &str,
-             cmd: &str) -> Command {
-        let mut cargo = Command::new(&self.cargo);
-        let out_dir = self.stage_out(compiler, mode);
-        cargo.env("CARGO_TARGET_DIR", out_dir)
-             .arg(cmd)
-             .arg("-j").arg(self.jobs().to_string())
-             .arg("--target").arg(target);
-
-        let stage;
-        if compiler.stage == 0 && self.config.local_rebuild {
-            // Assume the local-rebuild rustc already has stage1 features.
-            stage = 1;
-        } else {
-            stage = compiler.stage;
-        }
-
-        // Customize the compiler we're running. Specify the compiler to cargo
-        // as our shim and then pass it some various options used to configure
-        // how the actual compiler itself is called.
-        //
-        // These variables are primarily all read by
-        // src/bootstrap/{rustc,rustdoc.rs}
-        cargo.env("RUSTC", self.out.join("bootstrap/debug/rustc"))
-             .env("RUSTC_REAL", self.compiler_path(compiler))
-             .env("RUSTC_STAGE", stage.to_string())
-             .env("RUSTC_DEBUGINFO", self.config.rust_debuginfo.to_string())
-             .env("RUSTC_CODEGEN_UNITS",
-                  self.config.rust_codegen_units.to_string())
-             .env("RUSTC_DEBUG_ASSERTIONS",
-                  self.config.rust_debug_assertions.to_string())
-             .env("RUSTC_SNAPSHOT", &self.rustc)
-             .env("RUSTC_SYSROOT", self.sysroot(compiler))
-             .env("RUSTC_LIBDIR", self.rustc_libdir(compiler))
-             .env("RUSTC_SNAPSHOT_LIBDIR", self.rustc_snapshot_libdir())
-             .env("RUSTC_RPATH", self.config.rust_rpath.to_string())
-             .env("RUSTDOC", self.out.join("bootstrap/debug/rustdoc"))
-             .env("RUSTDOC_REAL", self.rustdoc(compiler))
-             .env("RUSTC_FLAGS", self.rustc_flags(target).join(" "));
-
-        self.add_bootstrap_key(compiler, &mut cargo);
-
-        // Specify some various options for build scripts used throughout
-        // the build.
-        //
-        // FIXME: the guard against msvc shouldn't need to be here
-        if !target.contains("msvc") {
-            cargo.env(format!("CC_{}", target), self.cc(target))
-                 .env(format!("AR_{}", target), self.ar(target).unwrap()) // only msvc is None
-                 .env(format!("CFLAGS_{}", target), self.cflags(target).join(" "));
-        }
-
-        // If we're building for OSX, inform the compiler and the linker that
-        // we want to build a compiler runnable on 10.7
-        if target.contains("apple-darwin") {
-            cargo.env("MACOSX_DEPLOYMENT_TARGET", "10.7");
-        }
-
-        // Environment variables *required* needed throughout the build
-        //
-        // FIXME: should update code to not require this env var
-        cargo.env("CFG_COMPILER_HOST_TRIPLE", target);
-
-        if self.config.verbose || self.flags.verbose {
-            cargo.arg("-v");
-        }
-        if self.config.rust_optimize {
-            cargo.arg("--release");
-        }
-        return cargo
-    }
-
-    /// Get a path to the compiler specified.
-    fn compiler_path(&self, compiler: &Compiler) -> PathBuf {
-        if compiler.is_snapshot(self) {
-            self.rustc.clone()
-        } else {
-            self.sysroot(compiler).join("bin").join(exe("rustc", compiler.host))
-        }
-    }
-
-    /// Get the specified tool built by the specified compiler
-    fn tool(&self, compiler: &Compiler, tool: &str) -> PathBuf {
-        self.cargo_out(compiler, Mode::Tool, compiler.host)
-            .join(exe(tool, compiler.host))
-    }
-
-    /// Get the `rustdoc` executable next to the specified compiler
-    fn rustdoc(&self, compiler: &Compiler) -> PathBuf {
-        let mut rustdoc = self.compiler_path(compiler);
-        rustdoc.pop();
-        rustdoc.push(exe("rustdoc", compiler.host));
-        return rustdoc
-    }
-
-    /// Get a `Command` which is ready to run `tool` in `stage` built for
-    /// `host`.
-    fn tool_cmd(&self, compiler: &Compiler, tool: &str) -> Command {
-        let mut cmd = Command::new(self.tool(&compiler, tool));
-        let host = compiler.host;
-        let paths = vec![
-            self.cargo_out(compiler, Mode::Libstd, host).join("deps"),
-            self.cargo_out(compiler, Mode::Libtest, host).join("deps"),
-            self.cargo_out(compiler, Mode::Librustc, host).join("deps"),
-            self.cargo_out(compiler, Mode::Tool, host).join("deps"),
-        ];
-        add_lib_path(paths, &mut cmd);
-        return cmd
-    }
-
-    /// Get the space-separated set of activated features for the standard
-    /// library.
-    fn std_features(&self) -> String {
-        let mut features = String::new();
-        if self.config.debug_jemalloc {
-            features.push_str(" debug-jemalloc");
-        }
-        if self.config.use_jemalloc {
-            features.push_str(" jemalloc");
-        }
-        return features
-    }
-
-    /// Get the space-separated set of activated features for the compiler.
-    fn rustc_features(&self) -> String {
-        let mut features = String::new();
-        if self.config.use_jemalloc {
-            features.push_str(" jemalloc");
-        }
-        return features
-    }
-
-    /// Component directory that Cargo will produce output into (e.g.
-    /// release/debug)
-    fn cargo_dir(&self) -> &'static str {
-        if self.config.rust_optimize {"release"} else {"debug"}
-    }
-
-    /// Returns the sysroot for the `compiler` specified that *this build system
-    /// generates*.
-    ///
-    /// That is, the sysroot for the stage0 compiler is not what the compiler
-    /// thinks it is by default, but it's the same as the default for stages
-    /// 1-3.
-    fn sysroot(&self, compiler: &Compiler) -> PathBuf {
-        if compiler.stage == 0 {
-            self.out.join(compiler.host).join("stage0-sysroot")
-        } else {
-            self.out.join(compiler.host).join(format!("stage{}", compiler.stage))
-        }
-    }
-
-    /// Returns the libdir where the standard library and other artifacts are
-    /// found for a compiler's sysroot.
-    fn sysroot_libdir(&self, compiler: &Compiler, target: &str) -> PathBuf {
-        self.sysroot(compiler).join("lib").join("rustlib")
-            .join(target).join("lib")
-    }
-
-    /// Returns the root directory for all output generated in a particular
-    /// stage when running with a particular host compiler.
-    ///
-    /// The mode indicates what the root directory is for.
-    fn stage_out(&self, compiler: &Compiler, mode: Mode) -> PathBuf {
-        let suffix = match mode {
-            Mode::Libstd => "-std",
-            Mode::Libtest => "-test",
-            Mode::Tool => "-tools",
-            Mode::Librustc => "-rustc",
-        };
-        self.out.join(compiler.host)
-                .join(format!("stage{}{}", compiler.stage, suffix))
-    }
-
-    /// Returns the root output directory for all Cargo output in a given stage,
-    /// running a particular comipler, wehther or not we're building the
-    /// standard library, and targeting the specified architecture.
-    fn cargo_out(&self,
-                 compiler: &Compiler,
-                 mode: Mode,
-                 target: &str) -> PathBuf {
-        self.stage_out(compiler, mode).join(target).join(self.cargo_dir())
-    }
-
-    /// Root output directory for LLVM compiled for `target`
-    ///
-    /// Note that if LLVM is configured externally then the directory returned
-    /// will likely be empty.
-    fn llvm_out(&self, target: &str) -> PathBuf {
-        self.out.join(target).join("llvm")
-    }
-
-    /// Returns the path to `llvm-config` for the specified target.
-    ///
-    /// If a custom `llvm-config` was specified for target then that's returned
-    /// instead.
-    fn llvm_config(&self, target: &str) -> PathBuf {
-        let target_config = self.config.target_config.get(target);
-        if let Some(s) = target_config.and_then(|c| c.llvm_config.as_ref()) {
-            s.clone()
-        } else {
-            self.llvm_out(&self.config.build).join("bin")
-                .join(exe("llvm-config", target))
-        }
-    }
-
-    /// Returns the path to `FileCheck` binary for the specified target
-    fn llvm_filecheck(&self, target: &str) -> PathBuf {
-        let target_config = self.config.target_config.get(target);
-        if let Some(s) = target_config.and_then(|c| c.llvm_config.as_ref()) {
-            s.parent().unwrap().join(exe("FileCheck", target))
-        } else {
-            let base = self.llvm_out(&self.config.build).join("build");
-            let exe = exe("FileCheck", target);
-            if self.config.build.contains("msvc") {
-                base.join("Release/bin").join(exe)
-            } else {
-                base.join("bin").join(exe)
-            }
-        }
-    }
-
-    /// Root output directory for compiler-rt compiled for `target`
-    fn compiler_rt_out(&self, target: &str) -> PathBuf {
-        self.out.join(target).join("compiler-rt")
-    }
-
-    /// Root output directory for rust_test_helpers library compiled for
-    /// `target`
-    fn test_helpers_out(&self, target: &str) -> PathBuf {
-        self.out.join(target).join("rust-test-helpers")
-    }
-
-    /// Adds the compiler's directory of dynamic libraries to `cmd`'s dynamic
-    /// library lookup path.
-    fn add_rustc_lib_path(&self, compiler: &Compiler, cmd: &mut Command) {
-        // Windows doesn't need dylib path munging because the dlls for the
-        // compiler live next to the compiler and the system will find them
-        // automatically.
-        if cfg!(windows) {
-            return
-        }
-
-        add_lib_path(vec![self.rustc_libdir(compiler)], cmd);
-    }
-
-    /// Adds the compiler's bootstrap key to the environment of `cmd`.
-    fn add_bootstrap_key(&self, compiler: &Compiler, cmd: &mut Command) {
-        // In stage0 we're using a previously released stable compiler, so we
-        // use the stage0 bootstrap key. Otherwise we use our own build's
-        // bootstrap key.
-        let bootstrap_key = if compiler.is_snapshot(self) && !self.config.local_rebuild {
-            &self.bootstrap_key_stage0
-        } else {
-            &self.bootstrap_key
-        };
-        cmd.env("RUSTC_BOOTSTRAP_KEY", bootstrap_key);
-    }
-
-    /// Returns the compiler's libdir where it stores the dynamic libraries that
-    /// it itself links against.
-    ///
-    /// For example this returns `<sysroot>/lib` on Unix and `<sysroot>/bin` on
-    /// Windows.
-    fn rustc_libdir(&self, compiler: &Compiler) -> PathBuf {
-        if compiler.is_snapshot(self) {
-            self.rustc_snapshot_libdir()
-        } else {
-            self.sysroot(compiler).join(libdir(compiler.host))
-        }
-    }
-
-    /// Returns the libdir of the snapshot compiler.
-    fn rustc_snapshot_libdir(&self) -> PathBuf {
-        self.rustc.parent().unwrap().parent().unwrap()
-            .join(libdir(&self.config.build))
-    }
-
-    /// Runs a command, printing out nice contextual information if it fails.
-    fn run(&self, cmd: &mut Command) {
-        self.verbose(&format!("running: {:?}", cmd));
-        run_silent(cmd)
-    }
-
-    /// Prints a message if this build is configured in verbose mode.
-    fn verbose(&self, msg: &str) {
-        if self.flags.verbose || self.config.verbose {
-            println!("{}", msg);
-        }
-    }
-
-    /// Returns the number of parallel jobs that have been configured for this
-    /// build.
-    fn jobs(&self) -> u32 {
-        self.flags.jobs.unwrap_or(num_cpus::get() as u32)
-    }
-
-    /// Returns the path to the C compiler for the target specified.
-    fn cc(&self, target: &str) -> &Path {
-        self.cc[target].0.path()
-    }
-
-    /// Returns a list of flags to pass to the C compiler for the target
-    /// specified.
-    fn cflags(&self, target: &str) -> Vec<String> {
-        // Filter out -O and /O (the optimization flags) that we picked up from
-        // gcc-rs because the build scripts will determine that for themselves.
-        let mut base = self.cc[target].0.args().iter()
-                           .map(|s| s.to_string_lossy().into_owned())
-                           .filter(|s| !s.starts_with("-O") && !s.starts_with("/O"))
-                           .collect::<Vec<_>>();
-
-        // If we're compiling on OSX then we add a few unconditional flags
-        // indicating that we want libc++ (more filled out than libstdc++) and
-        // we want to compile for 10.7. This way we can ensure that
-        // LLVM/jemalloc/etc are all properly compiled.
-        if target.contains("apple-darwin") {
-            base.push("-stdlib=libc++".into());
-            base.push("-mmacosx-version-min=10.7".into());
-        }
-        return base
-    }
-
-    /// Returns the path to the `ar` archive utility for the target specified.
-    fn ar(&self, target: &str) -> Option<&Path> {
-        self.cc[target].1.as_ref().map(|p| &**p)
-    }
-
-    /// Returns the path to the C++ compiler for the target specified, may panic
-    /// if no C++ compiler was configured for the target.
-    fn cxx(&self, target: &str) -> &Path {
-        self.cxx[target].path()
-    }
-
-    /// Returns flags to pass to the compiler to generate code for `target`.
-    fn rustc_flags(&self, target: &str) -> Vec<String> {
-        // New flags should be added here with great caution!
-        //
-        // It's quite unfortunate to **require** flags to generate code for a
-        // target, so it should only be passed here if absolutely necessary!
-        // Most default configuration should be done through target specs rather
-        // than an entry here.
-
-        let mut base = Vec::new();
-        if target != self.config.build && !target.contains("msvc") {
-            base.push(format!("-Clinker={}", self.cc(target).display()));
-        }
-        return base
-    }
-}
-
-impl<'a> Compiler<'a> {
-    /// Creates a new complier for the specified stage/host
-    fn new(stage: u32, host: &'a str) -> Compiler<'a> {
-        Compiler { stage: stage, host: host }
-    }
-
-    /// Returns whether this is a snapshot compiler for `build`'s configuration
-    fn is_snapshot(&self, build: &Build) -> bool {
-        self.stage == 0 && self.host == build.config.build
-    }
-}
diff --git a/src/bootstrap/build/native.rs b/src/bootstrap/build/native.rs
deleted file mode 100644 (file)
index f6030cf..0000000
+++ /dev/null
@@ -1,238 +0,0 @@
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Compilation of native dependencies like LLVM.
-//!
-//! Native projects like LLVM unfortunately aren't suited just yet for
-//! compilation in build scripts that Cargo has. This is because thie
-//! compilation takes a *very* long time but also because we don't want to
-//! compile LLVM 3 times as part of a normal bootstrap (we want it cached).
-//!
-//! LLVM and compiler-rt are essentially just wired up to everything else to
-//! ensure that they're always in place if needed.
-
-use std::path::Path;
-use std::process::Command;
-use std::fs::{self, File};
-
-use build_helper::output;
-use cmake;
-use gcc;
-
-use build::Build;
-use build::util::{staticlib, up_to_date};
-
-/// Compile LLVM for `target`.
-pub fn llvm(build: &Build, target: &str) {
-    // If we're using a custom LLVM bail out here, but we can only use a
-    // custom LLVM for the build triple.
-    if let Some(config) = build.config.target_config.get(target) {
-        if let Some(ref s) = config.llvm_config {
-            return check_llvm_version(build, s);
-        }
-    }
-
-    // If the cleaning trigger is newer than our built artifacts (or if the
-    // artifacts are missing) then we keep going, otherwise we bail out.
-    let dst = build.llvm_out(target);
-    let stamp = build.src.join("src/rustllvm/llvm-auto-clean-trigger");
-    let done_stamp = dst.join("llvm-finished-building");
-    build.clear_if_dirty(&dst, &stamp);
-    if fs::metadata(&done_stamp).is_ok() {
-        return
-    }
-
-    println!("Building LLVM for {}", target);
-
-    let _ = fs::remove_dir_all(&dst.join("build"));
-    t!(fs::create_dir_all(&dst.join("build")));
-    let assertions = if build.config.llvm_assertions {"ON"} else {"OFF"};
-
-    // http://llvm.org/docs/CMake.html
-    let mut cfg = cmake::Config::new(build.src.join("src/llvm"));
-    if build.config.ninja {
-        cfg.generator("Ninja");
-    }
-    cfg.target(target)
-       .host(&build.config.build)
-       .out_dir(&dst)
-       .profile(if build.config.llvm_optimize {"Release"} else {"Debug"})
-       .define("LLVM_ENABLE_ASSERTIONS", assertions)
-       .define("LLVM_TARGETS_TO_BUILD", "X86;ARM;AArch64;Mips;PowerPC")
-       .define("LLVM_INCLUDE_EXAMPLES", "OFF")
-       .define("LLVM_INCLUDE_TESTS", "OFF")
-       .define("LLVM_INCLUDE_DOCS", "OFF")
-       .define("LLVM_ENABLE_ZLIB", "OFF")
-       .define("WITH_POLLY", "OFF")
-       .define("LLVM_ENABLE_TERMINFO", "OFF")
-       .define("LLVM_ENABLE_LIBEDIT", "OFF")
-       .define("LLVM_PARALLEL_COMPILE_JOBS", build.jobs().to_string());
-
-    if target.starts_with("i686") {
-        cfg.define("LLVM_BUILD_32_BITS", "ON");
-    }
-
-    // http://llvm.org/docs/HowToCrossCompileLLVM.html
-    if target != build.config.build {
-        // FIXME: if the llvm root for the build triple is overridden then we
-        //        should use llvm-tblgen from there, also should verify that it
-        //        actually exists most of the time in normal installs of LLVM.
-        let host = build.llvm_out(&build.config.build).join("bin/llvm-tblgen");
-        cfg.define("CMAKE_CROSSCOMPILING", "True")
-           .define("LLVM_TARGET_ARCH", target.split('-').next().unwrap())
-           .define("LLVM_TABLEGEN", &host)
-           .define("LLVM_DEFAULT_TARGET_TRIPLE", target);
-    }
-
-    // MSVC handles compiler business itself
-    if !target.contains("msvc") {
-        if build.config.ccache {
-           cfg.define("CMAKE_C_COMPILER", "ccache")
-              .define("CMAKE_C_COMPILER_ARG1", build.cc(target))
-              .define("CMAKE_CXX_COMPILER", "ccache")
-              .define("CMAKE_CXX_COMPILER_ARG1", build.cxx(target));
-        } else {
-           cfg.define("CMAKE_C_COMPILER", build.cc(target))
-              .define("CMAKE_CXX_COMPILER", build.cxx(target));
-        }
-        cfg.build_arg("-j").build_arg(build.jobs().to_string());
-
-        cfg.define("CMAKE_C_FLAGS", build.cflags(target).join(" "));
-        cfg.define("CMAKE_CXX_FLAGS", build.cflags(target).join(" "));
-    }
-
-    // FIXME: we don't actually need to build all LLVM tools and all LLVM
-    //        libraries here, e.g. we just want a few components and a few
-    //        tools. Figure out how to filter them down and only build the right
-    //        tools and libs on all platforms.
-    cfg.build();
-
-    t!(File::create(&done_stamp));
-}
-
-fn check_llvm_version(build: &Build, llvm_config: &Path) {
-    if !build.config.llvm_version_check {
-        return
-    }
-
-    let mut cmd = Command::new(llvm_config);
-    let version = output(cmd.arg("--version"));
-    if version.starts_with("3.5") || version.starts_with("3.6") ||
-       version.starts_with("3.7") {
-        return
-    }
-    panic!("\n\nbad LLVM version: {}, need >=3.5\n\n", version)
-}
-
-/// Compiles the `compiler-rt` library, or at least the builtins part of it.
-///
-/// This uses the CMake build system and an existing LLVM build directory to
-/// compile the project.
-pub fn compiler_rt(build: &Build, target: &str) {
-    let dst = build.compiler_rt_out(target);
-    let arch = target.split('-').next().unwrap();
-    let mode = if build.config.rust_optimize {"Release"} else {"Debug"};
-
-    let build_llvm_config = build.llvm_config(&build.config.build);
-    let mut cfg = cmake::Config::new(build.src.join("src/compiler-rt"));
-    cfg.target(target)
-       .host(&build.config.build)
-       .out_dir(&dst)
-       .profile(mode)
-       .define("LLVM_CONFIG_PATH", build_llvm_config)
-       .define("COMPILER_RT_DEFAULT_TARGET_TRIPLE", target)
-       .define("COMPILER_RT_BUILD_SANITIZERS", "OFF")
-       .define("COMPILER_RT_BUILD_EMUTLS", "OFF")
-       // inform about c/c++ compilers, the c++ compiler isn't actually used but
-       // it's needed to get the initial configure to work on all platforms.
-       .define("CMAKE_C_COMPILER", build.cc(target))
-       .define("CMAKE_CXX_COMPILER", build.cc(target));
-
-    let (dir, build_target, libname) = if target.contains("linux") ||
-                                          target.contains("freebsd") ||
-                                          target.contains("netbsd") {
-        let os_extra = if target.contains("android") && target.contains("arm") {
-            "-android"
-        } else {
-            ""
-        };
-        let builtins_arch = match arch {
-            "i586" => "i386",
-            "arm" | "armv7" if target.contains("android") => "armhf",
-            "arm" if target.contains("eabihf") => "armhf",
-            _ => arch,
-        };
-        let target = format!("clang_rt.builtins-{}", builtins_arch);
-        ("linux".to_string(),
-         target.clone(),
-         format!("{}{}", target, os_extra))
-    } else if target.contains("apple-darwin") {
-        let builtins_arch = match arch {
-            "i686" => "i386",
-            _ => arch,
-        };
-        let target = format!("clang_rt.builtins_{}_osx", builtins_arch);
-        ("builtins".to_string(), target.clone(), target)
-    } else if target.contains("apple-ios") {
-        cfg.define("COMPILER_RT_ENABLE_IOS", "ON");
-        let target = match arch {
-            "armv7s" => "hard_pic_armv7em_macho_embedded".to_string(),
-            "aarch64" => "builtins_arm64_ios".to_string(),
-            _ => format!("hard_pic_{}_macho_embedded", arch),
-        };
-        ("builtins".to_string(), target.clone(), target)
-    } else if target.contains("windows-gnu") {
-        let target = format!("clang_rt.builtins-{}", arch);
-        ("windows".to_string(), target.clone(), target)
-    } else if target.contains("windows-msvc") {
-        let builtins_arch = match arch {
-            "i586" | "i686" => "i386",
-            _ => arch,
-        };
-        (format!("windows/{}", mode),
-         "lib/builtins/builtins".to_string(),
-         format!("clang_rt.builtins-{}", builtins_arch))
-    } else {
-        panic!("can't get os from target: {}", target)
-    };
-    let output = dst.join("build/lib").join(dir)
-                    .join(staticlib(&libname, target));
-    build.compiler_rt_built.borrow_mut().insert(target.to_string(),
-                                                output.clone());
-    if fs::metadata(&output).is_ok() {
-        return
-    }
-    let _ = fs::remove_dir_all(&dst);
-    t!(fs::create_dir_all(&dst));
-    cfg.build_target(&build_target);
-    cfg.build();
-}
-
-/// Compiles the `rust_test_helpers.c` library which we used in various
-/// `run-pass` test suites for ABI testing.
-pub fn test_helpers(build: &Build, target: &str) {
-    let dst = build.test_helpers_out(target);
-    let src = build.src.join("src/rt/rust_test_helpers.c");
-    if up_to_date(&src, &dst.join("librust_test_helpers.a")) {
-        return
-    }
-
-    println!("Building test helpers");
-    t!(fs::create_dir_all(&dst));
-    let mut cfg = gcc::Config::new();
-    cfg.cargo_metadata(false)
-       .out_dir(&dst)
-       .target(target)
-       .host(&build.config.build)
-       .opt_level(0)
-       .debug(false)
-       .file(build.src.join("src/rt/rust_test_helpers.c"))
-       .compile("librust_test_helpers.a");
-}
diff --git a/src/bootstrap/build/sanity.rs b/src/bootstrap/build/sanity.rs
deleted file mode 100644 (file)
index 5eced00..0000000
+++ /dev/null
@@ -1,172 +0,0 @@
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Sanity checking performed by rustbuild before actually executing anything.
-//!
-//! This module contains the implementation of ensuring that the build
-//! environment looks reasonable before progressing. This will verify that
-//! various programs like git and python exist, along with ensuring that all C
-//! compilers for cross-compiling are found.
-//!
-//! In theory if we get past this phase it's a bug if a build fails, but in
-//! practice that's likely not true!
-
-use std::collections::HashSet;
-use std::env;
-use std::ffi::{OsStr, OsString};
-use std::fs;
-use std::process::Command;
-
-use build_helper::output;
-
-use build::Build;
-
-pub fn check(build: &mut Build) {
-    let mut checked = HashSet::new();
-    let path = env::var_os("PATH").unwrap_or(OsString::new());
-    let mut need_cmd = |cmd: &OsStr| {
-        if !checked.insert(cmd.to_owned()) {
-            return
-        }
-        for path in env::split_paths(&path).map(|p| p.join(cmd)) {
-            if fs::metadata(&path).is_ok() ||
-               fs::metadata(path.with_extension("exe")).is_ok() {
-                return
-            }
-        }
-        panic!("\n\ncouldn't find required command: {:?}\n\n", cmd);
-    };
-
-    // If we've got a git directory we're gona need git to update
-    // submodules and learn about various other aspects.
-    if fs::metadata(build.src.join(".git")).is_ok() {
-        need_cmd("git".as_ref());
-    }
-
-    // We need cmake, but only if we're actually building LLVM
-    for host in build.config.host.iter() {
-        if let Some(config) = build.config.target_config.get(host) {
-            if config.llvm_config.is_some() {
-                continue
-            }
-        }
-        need_cmd("cmake".as_ref());
-        if build.config.ninja {
-            need_cmd("ninja".as_ref())
-        }
-        break
-    }
-
-    need_cmd("python".as_ref());
-
-    // We're gonna build some custom C code here and there, host triples
-    // also build some C++ shims for LLVM so we need a C++ compiler.
-    for target in build.config.target.iter() {
-        need_cmd(build.cc(target).as_ref());
-        if let Some(ar) = build.ar(target) {
-            need_cmd(ar.as_ref());
-        }
-    }
-    for host in build.config.host.iter() {
-        need_cmd(build.cxx(host).as_ref());
-    }
-
-    // Externally configured LLVM requires FileCheck to exist
-    let filecheck = build.llvm_filecheck(&build.config.build);
-    if !filecheck.starts_with(&build.out) && !filecheck.exists() {
-        panic!("filecheck executable {:?} does not exist", filecheck);
-    }
-
-    for target in build.config.target.iter() {
-        // Either can't build or don't want to run jemalloc on these targets
-        if target.contains("rumprun") ||
-           target.contains("bitrig") ||
-           target.contains("openbsd") ||
-           target.contains("msvc") {
-            build.config.use_jemalloc = false;
-        }
-
-        // Can't compile for iOS unless we're on OSX
-        if target.contains("apple-ios") &&
-           !build.config.build.contains("apple-darwin") {
-            panic!("the iOS target is only supported on OSX");
-        }
-
-        // Make sure musl-root is valid if specified
-        if target.contains("musl") && (target.contains("x86_64") || target.contains("i686")) {
-            match build.config.musl_root {
-                Some(ref root) => {
-                    if fs::metadata(root.join("lib/libc.a")).is_err() {
-                        panic!("couldn't find libc.a in musl dir: {}",
-                               root.join("lib").display());
-                    }
-                    if fs::metadata(root.join("lib/libunwind.a")).is_err() {
-                        panic!("couldn't find libunwind.a in musl dir: {}",
-                               root.join("lib").display());
-                    }
-                }
-                None => {
-                    panic!("when targeting MUSL the build.musl-root option \
-                            must be specified in config.toml")
-                }
-            }
-        }
-
-        if target.contains("msvc") {
-            // There are three builds of cmake on windows: MSVC, MinGW, and
-            // Cygwin. The Cygwin build does not have generators for Visual
-            // Studio, so detect that here and error.
-            let out = output(Command::new("cmake").arg("--help"));
-            if !out.contains("Visual Studio") {
-                panic!("
-cmake does not support Visual Studio generators.
-
-This is likely due to it being an msys/cygwin build of cmake,
-rather than the required windows version, built using MinGW
-or Visual Studio.
-
-If you are building under msys2 try installing the mingw-w64-x86_64-cmake
-package instead of cmake:
-
-$ pacman -R cmake && pacman -S mingw-w64-x86_64-cmake
-");
-            }
-        }
-
-        if target.contains("arm-linux-android") {
-            need_cmd("adb".as_ref());
-        }
-    }
-
-    for host in build.flags.host.iter() {
-        if !build.config.host.contains(host) {
-            panic!("specified host `{}` is not in the ./configure list", host);
-        }
-    }
-    for target in build.flags.target.iter() {
-        if !build.config.target.contains(target) {
-            panic!("specified target `{}` is not in the ./configure list",
-                   target);
-        }
-    }
-
-    let run = |cmd: &mut Command| {
-        cmd.output().map(|output| {
-            String::from_utf8_lossy(&output.stdout)
-                   .lines().next().unwrap()
-                   .to_string()
-        })
-    };
-    build.gdb_version = run(Command::new("gdb").arg("--version")).ok();
-    build.lldb_version = run(Command::new("lldb").arg("--version")).ok();
-    if build.lldb_version.is_some() {
-        build.lldb_python_dir = run(Command::new("lldb").arg("-P")).ok();
-    }
-}
diff --git a/src/bootstrap/build/step.rs b/src/bootstrap/build/step.rs
deleted file mode 100644 (file)
index 7cbbd67..0000000
+++ /dev/null
@@ -1,590 +0,0 @@
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Major workhorse of rustbuild, definition and dependencies between stages of
-//! the copmile.
-//!
-//! The primary purpose of this module is to define the various `Step`s of
-//! execution of the build. Each `Step` has a corresponding `Source` indicating
-//! what it's actually doing along with a number of dependencies which must be
-//! executed first.
-//!
-//! This module will take the CLI as input and calculate the steps required for
-//! the build requested, ensuring that all intermediate pieces are in place.
-//! Essentially this module is a `make`-replacement, but not as good.
-
-use std::collections::HashSet;
-
-use build::{Build, Compiler};
-
-#[derive(Hash, Eq, PartialEq, Clone, Debug)]
-pub struct Step<'a> {
-    pub src: Source<'a>,
-    pub target: &'a str,
-}
-
-/// Macro used to iterate over all targets that are recognized by the build
-/// system.
-///
-/// Whenever a new step is added it will involve adding an entry here, updating
-/// the dependencies section below, and then adding an implementation of the
-/// step in `build/mod.rs`.
-///
-/// This macro takes another macro as an argument and then calls that macro with
-/// all steps that the build system knows about.
-macro_rules! targets {
-    ($m:ident) => {
-        $m! {
-            // Step representing building the stageN compiler. This is just the
-            // compiler executable itself, not any of the support libraries
-            (rustc, Rustc { stage: u32 }),
-
-            // Steps for the two main cargo builds. These are parameterized over
-            // the compiler which is producing the artifact.
-            (libstd, Libstd { compiler: Compiler<'a> }),
-            (libtest, Libtest { compiler: Compiler<'a> }),
-            (librustc, Librustc { compiler: Compiler<'a> }),
-
-            // Links the target produced by the compiler provided into the
-            // host's directory also provided.
-            (libstd_link, LibstdLink {
-                compiler: Compiler<'a>,
-                host: &'a str
-            }),
-            (libtest_link, LibtestLink {
-                compiler: Compiler<'a>,
-                host: &'a str
-            }),
-            (librustc_link, LibrustcLink {
-                compiler: Compiler<'a>,
-                host: &'a str
-            }),
-
-            // Various tools that we can build as part of the build.
-            (tool_linkchecker, ToolLinkchecker { stage: u32 }),
-            (tool_rustbook, ToolRustbook { stage: u32 }),
-            (tool_error_index, ToolErrorIndex { stage: u32 }),
-            (tool_cargotest, ToolCargoTest { stage: u32 }),
-            (tool_tidy, ToolTidy { stage: u32 }),
-            (tool_compiletest, ToolCompiletest { stage: u32 }),
-
-            // Steps for long-running native builds. Ideally these wouldn't
-            // actually exist and would be part of build scripts, but for now
-            // these are here.
-            //
-            // There aren't really any parameters to this, but empty structs
-            // with braces are unstable so we just pick something that works.
-            (llvm, Llvm { _dummy: () }),
-            (compiler_rt, CompilerRt { _dummy: () }),
-            (test_helpers, TestHelpers { _dummy: () }),
-            (debugger_scripts, DebuggerScripts { stage: u32 }),
-
-            // Steps for various pieces of documentation that we can generate,
-            // the 'doc' step is just a pseudo target to depend on a bunch of
-            // others.
-            (doc, Doc { stage: u32 }),
-            (doc_book, DocBook { stage: u32 }),
-            (doc_nomicon, DocNomicon { stage: u32 }),
-            (doc_style, DocStyle { stage: u32 }),
-            (doc_standalone, DocStandalone { stage: u32 }),
-            (doc_std, DocStd { stage: u32 }),
-            (doc_test, DocTest { stage: u32 }),
-            (doc_rustc, DocRustc { stage: u32 }),
-            (doc_error_index, DocErrorIndex { stage: u32 }),
-
-            // Steps for running tests. The 'check' target is just a pseudo
-            // target to depend on a bunch of others.
-            (check, Check { stage: u32, compiler: Compiler<'a> }),
-            (check_target, CheckTarget { stage: u32, compiler: Compiler<'a> }),
-            (check_linkcheck, CheckLinkcheck { stage: u32 }),
-            (check_cargotest, CheckCargoTest { stage: u32 }),
-            (check_tidy, CheckTidy { stage: u32 }),
-            (check_rpass, CheckRPass { compiler: Compiler<'a> }),
-            (check_rpass_full, CheckRPassFull { compiler: Compiler<'a> }),
-            (check_rpass_valgrind, CheckRPassValgrind { compiler: Compiler<'a> }),
-            (check_rfail, CheckRFail { compiler: Compiler<'a> }),
-            (check_rfail_full, CheckRFailFull { compiler: Compiler<'a> }),
-            (check_cfail, CheckCFail { compiler: Compiler<'a> }),
-            (check_cfail_full, CheckCFailFull { compiler: Compiler<'a> }),
-            (check_pfail, CheckPFail { compiler: Compiler<'a> }),
-            (check_pretty, CheckPretty { compiler: Compiler<'a> }),
-            (check_pretty_rpass, CheckPrettyRPass { compiler: Compiler<'a> }),
-            (check_pretty_rpass_full, CheckPrettyRPassFull { compiler: Compiler<'a> }),
-            (check_pretty_rfail, CheckPrettyRFail { compiler: Compiler<'a> }),
-            (check_pretty_rfail_full, CheckPrettyRFailFull { compiler: Compiler<'a> }),
-            (check_pretty_rpass_valgrind, CheckPrettyRPassValgrind { compiler: Compiler<'a> }),
-            (check_codegen, CheckCodegen { compiler: Compiler<'a> }),
-            (check_codegen_units, CheckCodegenUnits { compiler: Compiler<'a> }),
-            (check_incremental, CheckIncremental { compiler: Compiler<'a> }),
-            (check_ui, CheckUi { compiler: Compiler<'a> }),
-            (check_debuginfo, CheckDebuginfo { compiler: Compiler<'a> }),
-            (check_rustdoc, CheckRustdoc { compiler: Compiler<'a> }),
-            (check_docs, CheckDocs { compiler: Compiler<'a> }),
-            (check_error_index, CheckErrorIndex { compiler: Compiler<'a> }),
-            (check_rmake, CheckRMake { compiler: Compiler<'a> }),
-            (check_crate_std, CheckCrateStd { compiler: Compiler<'a> }),
-            (check_crate_test, CheckCrateTest { compiler: Compiler<'a> }),
-            (check_crate_rustc, CheckCrateRustc { compiler: Compiler<'a> }),
-
-            // Distribution targets, creating tarballs
-            (dist, Dist { stage: u32 }),
-            (dist_docs, DistDocs { stage: u32 }),
-            (dist_mingw, DistMingw { _dummy: () }),
-            (dist_rustc, DistRustc { stage: u32 }),
-            (dist_std, DistStd { compiler: Compiler<'a> }),
-
-            // Misc targets
-            (android_copy_libs, AndroidCopyLibs { compiler: Compiler<'a> }),
-        }
-    }
-}
-
-// Define the `Source` enum by iterating over all the steps and peeling out just
-// the types that we want to define.
-
-macro_rules! item { ($a:item) => ($a) }
-
-macro_rules! define_source {
-    ($(($short:ident, $name:ident { $($args:tt)* }),)*) => {
-        item! {
-            #[derive(Hash, Eq, PartialEq, Clone, Debug)]
-            pub enum Source<'a> {
-                $($name { $($args)* }),*
-            }
-        }
-    }
-}
-
-targets!(define_source);
-
-/// Calculate a list of all steps described by `build`.
-///
-/// This will inspect the flags passed in on the command line and use that to
-/// build up a list of steps to execute. These steps will then be transformed
-/// into a topologically sorted list which when executed left-to-right will
-/// correctly sequence the entire build.
-pub fn all(build: &Build) -> Vec<Step> {
-    let mut ret = Vec::new();
-    let mut all = HashSet::new();
-    for target in top_level(build) {
-        fill(build, &target, &mut ret, &mut all);
-    }
-    return ret;
-
-    fn fill<'a>(build: &'a Build,
-                target: &Step<'a>,
-                ret: &mut Vec<Step<'a>>,
-                set: &mut HashSet<Step<'a>>) {
-        if set.insert(target.clone()) {
-            for dep in target.deps(build) {
-                fill(build, &dep, ret, set);
-            }
-            ret.push(target.clone());
-        }
-    }
-}
-
-/// Determines what top-level targets are requested as part of this build,
-/// returning them as a list.
-fn top_level(build: &Build) -> Vec<Step> {
-    let mut targets = Vec::new();
-    let stage = build.flags.stage.unwrap_or(2);
-
-    let host = Step {
-        src: Source::Llvm { _dummy: () },
-        target: build.flags.host.iter().next()
-                     .unwrap_or(&build.config.build),
-    };
-    let target = Step {
-        src: Source::Llvm { _dummy: () },
-        target: build.flags.target.iter().next().map(|x| &x[..])
-                     .unwrap_or(host.target)
-    };
-
-    // First, try to find steps on the command line.
-    add_steps(build, stage, &host, &target, &mut targets);
-
-    // If none are specified, then build everything.
-    if targets.len() == 0 {
-        let t = Step {
-            src: Source::Llvm { _dummy: () },
-            target: &build.config.build,
-        };
-        if build.config.docs {
-          targets.push(t.doc(stage));
-        }
-        for host in build.config.host.iter() {
-            if !build.flags.host.contains(host) {
-                continue
-            }
-            let host = t.target(host);
-            if host.target == build.config.build {
-                targets.push(host.librustc(host.compiler(stage)));
-            } else {
-                targets.push(host.librustc_link(t.compiler(stage), host.target));
-            }
-            for target in build.config.target.iter() {
-                if !build.flags.target.contains(target) {
-                    continue
-                }
-
-                if host.target == build.config.build {
-                    targets.push(host.target(target)
-                                     .libtest(host.compiler(stage)));
-                } else {
-                    targets.push(host.target(target)
-                                     .libtest_link(t.compiler(stage), host.target));
-                }
-            }
-        }
-    }
-
-    return targets
-
-}
-
-fn add_steps<'a>(build: &'a Build,
-                 stage: u32,
-                 host: &Step<'a>,
-                 target: &Step<'a>,
-                 targets: &mut Vec<Step<'a>>) {
-    struct Context<'a> {
-        stage: u32,
-        compiler: Compiler<'a>,
-        _dummy: (),
-        host: &'a str,
-    }
-    for step in build.flags.step.iter() {
-
-        // The macro below insists on hygienic access to all local variables, so
-        // we shove them all in a struct and subvert hygiene by accessing struct
-        // fields instead,
-        let cx = Context {
-            stage: stage,
-            compiler: host.target(&build.config.build).compiler(stage),
-            _dummy: (),
-            host: host.target,
-        };
-        macro_rules! add_step {
-            ($(($short:ident, $name:ident { $($arg:ident: $t:ty),* }),)*) => ({$(
-                let name = stringify!($short).replace("_", "-");
-                if &step[..] == &name[..] {
-                    targets.push(target.$short($(cx.$arg),*));
-                    continue
-                }
-                drop(name);
-            )*})
-        }
-
-        targets!(add_step);
-
-        panic!("unknown step: {}", step);
-    }
-}
-
-macro_rules! constructors {
-    ($(($short:ident, $name:ident { $($arg:ident: $t:ty),* }),)*) => {$(
-        fn $short(&self, $($arg: $t),*) -> Step<'a> {
-            Step {
-                src: Source::$name { $($arg: $arg),* },
-                target: self.target,
-            }
-        }
-    )*}
-}
-
-impl<'a> Step<'a> {
-    fn compiler(&self, stage: u32) -> Compiler<'a> {
-        Compiler::new(stage, self.target)
-    }
-
-    fn target(&self, target: &'a str) -> Step<'a> {
-        Step { target: target, src: self.src.clone() }
-    }
-
-    // Define ergonomic constructors for each step defined above so they can be
-    // easily constructed.
-    targets!(constructors);
-
-    /// Mapping of all dependencies for rustbuild.
-    ///
-    /// This function receives a step, the build that we're building for, and
-    /// then returns a list of all the dependencies of that step.
-    pub fn deps(&self, build: &'a Build) -> Vec<Step<'a>> {
-        match self.src {
-            Source::Rustc { stage: 0 } => {
-                Vec::new()
-            }
-            Source::Rustc { stage } => {
-                let compiler = Compiler::new(stage - 1, &build.config.build);
-                vec![self.librustc(compiler)]
-            }
-            Source::Librustc { compiler } => {
-                vec![self.libtest(compiler), self.llvm(())]
-            }
-            Source::Libtest { compiler } => {
-                vec![self.libstd(compiler)]
-            }
-            Source::Libstd { compiler } => {
-                vec![self.compiler_rt(()),
-                     self.rustc(compiler.stage).target(compiler.host)]
-            }
-            Source::LibrustcLink { compiler, host } => {
-                vec![self.librustc(compiler),
-                     self.libtest_link(compiler, host)]
-            }
-            Source::LibtestLink { compiler, host } => {
-                vec![self.libtest(compiler), self.libstd_link(compiler, host)]
-            }
-            Source::LibstdLink { compiler, host } => {
-                vec![self.libstd(compiler),
-                     self.target(host).rustc(compiler.stage)]
-            }
-            Source::CompilerRt { _dummy } => {
-                vec![self.llvm(()).target(&build.config.build)]
-            }
-            Source::Llvm { _dummy } => Vec::new(),
-            Source::TestHelpers { _dummy } => Vec::new(),
-            Source::DebuggerScripts { stage: _ } => Vec::new(),
-
-            // Note that all doc targets depend on artifacts from the build
-            // architecture, not the target (which is where we're generating
-            // docs into).
-            Source::DocStd { stage } => {
-                let compiler = self.target(&build.config.build).compiler(stage);
-                vec![self.libstd(compiler)]
-            }
-            Source::DocTest { stage } => {
-                let compiler = self.target(&build.config.build).compiler(stage);
-                vec![self.libtest(compiler)]
-            }
-            Source::DocBook { stage } |
-            Source::DocNomicon { stage } |
-            Source::DocStyle { stage } => {
-                vec![self.target(&build.config.build).tool_rustbook(stage)]
-            }
-            Source::DocErrorIndex { stage } => {
-                vec![self.target(&build.config.build).tool_error_index(stage)]
-            }
-            Source::DocStandalone { stage } => {
-                vec![self.target(&build.config.build).rustc(stage)]
-            }
-            Source::DocRustc { stage } => {
-                vec![self.doc_test(stage)]
-            }
-            Source::Doc { stage } => {
-                vec![self.doc_book(stage), self.doc_nomicon(stage),
-                     self.doc_style(stage), self.doc_standalone(stage),
-                     self.doc_std(stage),
-                     self.doc_error_index(stage)]
-            }
-            Source::Check { stage, compiler } => {
-                // Check is just a pseudo step which means check all targets,
-                // so just depend on checking all targets.
-                build.config.target.iter().map(|t| {
-                    self.target(t).check_target(stage, compiler)
-                }).collect()
-            }
-            Source::CheckTarget { stage, compiler } => {
-                // CheckTarget here means run all possible test suites for this
-                // target. Most of the time, however, we can't actually run
-                // anything if we're not the build triple as we could be cross
-                // compiling.
-                //
-                // As a result, the base set of targets here is quite stripped
-                // down from the standard set of targets. These suites have
-                // their own internal logic to run in cross-compiled situations
-                // if they'll run at all. For example compiletest knows that
-                // when testing Android targets we ship artifacts to the
-                // emulator.
-                //
-                // When in doubt the rule of thumb for adding to this list is
-                // "should this test suite run on the android bot?"
-                let mut base = vec![
-                    self.check_rpass(compiler),
-                    self.check_rfail(compiler),
-                    self.check_crate_std(compiler),
-                    self.check_crate_test(compiler),
-                    self.check_debuginfo(compiler),
-                    self.dist(stage),
-                ];
-
-                // If we're testing the build triple, then we know we can
-                // actually run binaries and such, so we run all possible tests
-                // that we know about.
-                if self.target == build.config.build {
-                    base.extend(vec![
-                        // docs-related
-                        self.check_docs(compiler),
-                        self.check_error_index(compiler),
-                        self.check_rustdoc(compiler),
-
-                        // UI-related
-                        self.check_cfail(compiler),
-                        self.check_pfail(compiler),
-                        self.check_ui(compiler),
-
-                        // codegen-related
-                        self.check_incremental(compiler),
-                        self.check_codegen(compiler),
-                        self.check_codegen_units(compiler),
-
-                        // misc compiletest-test suites
-                        self.check_rpass_full(compiler),
-                        self.check_rfail_full(compiler),
-                        self.check_cfail_full(compiler),
-                        self.check_pretty_rpass_full(compiler),
-                        self.check_pretty_rfail_full(compiler),
-                        self.check_rpass_valgrind(compiler),
-                        self.check_rmake(compiler),
-
-                        // crates
-                        self.check_crate_rustc(compiler),
-
-                        // pretty
-                        self.check_pretty(compiler),
-                        self.check_pretty_rpass(compiler),
-                        self.check_pretty_rfail(compiler),
-                        self.check_pretty_rpass_valgrind(compiler),
-
-                        // misc
-                        self.check_linkcheck(stage),
-                        self.check_tidy(stage),
-                    ]);
-                }
-                return base
-            }
-            Source::CheckLinkcheck { stage } => {
-                vec![self.tool_linkchecker(stage), self.doc(stage)]
-            }
-            Source::CheckCargoTest { stage } => {
-                vec![self.tool_cargotest(stage),
-                     self.librustc(self.compiler(stage))]
-            }
-            Source::CheckTidy { stage } => {
-                vec![self.tool_tidy(stage)]
-            }
-            Source::CheckPrettyRPass { compiler } |
-            Source::CheckPrettyRFail { compiler } |
-            Source::CheckRFail { compiler } |
-            Source::CheckPFail { compiler } |
-            Source::CheckCodegen { compiler } |
-            Source::CheckCodegenUnits { compiler } |
-            Source::CheckIncremental { compiler } |
-            Source::CheckUi { compiler } |
-            Source::CheckRustdoc { compiler } |
-            Source::CheckPretty { compiler } |
-            Source::CheckCFail { compiler } |
-            Source::CheckRPassValgrind { compiler } |
-            Source::CheckRPass { compiler } => {
-                let mut base = vec![
-                    self.libtest(compiler),
-                    self.target(compiler.host).tool_compiletest(compiler.stage),
-                    self.test_helpers(()),
-                ];
-                if self.target.contains("android") {
-                    base.push(self.android_copy_libs(compiler));
-                }
-                base
-            }
-            Source::CheckDebuginfo { compiler } => {
-                vec![
-                    self.libtest(compiler),
-                    self.target(compiler.host).tool_compiletest(compiler.stage),
-                    self.test_helpers(()),
-                    self.debugger_scripts(compiler.stage),
-                ]
-            }
-            Source::CheckRPassFull { compiler } |
-            Source::CheckRFailFull { compiler } |
-            Source::CheckCFailFull { compiler } |
-            Source::CheckPrettyRPassFull { compiler } |
-            Source::CheckPrettyRFailFull { compiler } |
-            Source::CheckPrettyRPassValgrind { compiler } |
-            Source::CheckRMake { compiler } => {
-                vec![self.librustc(compiler),
-                     self.target(compiler.host).tool_compiletest(compiler.stage)]
-            }
-            Source::CheckDocs { compiler } => {
-                vec![self.libstd(compiler)]
-            }
-            Source::CheckErrorIndex { compiler } => {
-                vec![self.libstd(compiler),
-                     self.target(compiler.host).tool_error_index(compiler.stage)]
-            }
-            Source::CheckCrateStd { compiler } => {
-                vec![self.libtest(compiler)]
-            }
-            Source::CheckCrateTest { compiler } => {
-                vec![self.libtest(compiler)]
-            }
-            Source::CheckCrateRustc { compiler } => {
-                vec![self.libtest(compiler)]
-            }
-
-            Source::ToolLinkchecker { stage } |
-            Source::ToolTidy { stage } => {
-                vec![self.libstd(self.compiler(stage))]
-            }
-            Source::ToolErrorIndex { stage } |
-            Source::ToolRustbook { stage } => {
-                vec![self.librustc(self.compiler(stage))]
-            }
-            Source::ToolCargoTest { stage } => {
-                vec![self.libstd(self.compiler(stage))]
-            }
-            Source::ToolCompiletest { stage } => {
-                vec![self.libtest(self.compiler(stage))]
-            }
-
-            Source::DistDocs { stage } => vec![self.doc(stage)],
-            Source::DistMingw { _dummy: _ } => Vec::new(),
-            Source::DistRustc { stage } => {
-                vec![self.rustc(stage)]
-            }
-            Source::DistStd { compiler } => {
-                // We want to package up as many target libraries as possible
-                // for the `rust-std` package, so if this is a host target we
-                // depend on librustc and otherwise we just depend on libtest.
-                if build.config.host.iter().any(|t| t == self.target) {
-                    vec![self.librustc(compiler)]
-                } else {
-                    vec![self.libtest(compiler)]
-                }
-            }
-
-            Source::Dist { stage } => {
-                let mut base = Vec::new();
-
-                for host in build.config.host.iter() {
-                    let host = self.target(host);
-                    base.push(host.dist_rustc(stage));
-                    if host.target.contains("windows-gnu") {
-                        base.push(host.dist_mingw(()));
-                    }
-
-                    let compiler = self.compiler(stage);
-                    for target in build.config.target.iter() {
-                        let target = self.target(target);
-                        if build.config.docs {
-                            base.push(target.dist_docs(stage));
-                        }
-                        base.push(target.dist_std(compiler));
-                    }
-                }
-                return base
-            }
-
-            Source::AndroidCopyLibs { compiler } => {
-                vec![self.libtest(compiler)]
-            }
-        }
-    }
-}
diff --git a/src/bootstrap/build/util.rs b/src/bootstrap/build/util.rs
deleted file mode 100644 (file)
index 36ce064..0000000
+++ /dev/null
@@ -1,123 +0,0 @@
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Various utility functions used throughout rustbuild.
-//!
-//! Simple things like testing the various filesystem operations here and there,
-//! not a lot of interesting happenings here unfortunately.
-
-use std::env;
-use std::path::{Path, PathBuf};
-use std::fs;
-use std::process::Command;
-
-use bootstrap::{dylib_path, dylib_path_var};
-use filetime::FileTime;
-
-/// Returns the `name` as the filename of a static library for `target`.
-pub fn staticlib(name: &str, target: &str) -> String {
-    if target.contains("windows-msvc") {
-        format!("{}.lib", name)
-    } else {
-        format!("lib{}.a", name)
-    }
-}
-
-/// Returns the last-modified time for `path`, or zero if it doesn't exist.
-pub fn mtime(path: &Path) -> FileTime {
-    fs::metadata(path).map(|f| {
-        FileTime::from_last_modification_time(&f)
-    }).unwrap_or(FileTime::zero())
-}
-
-/// Copies a file from `src` to `dst`, attempting to use hard links and then
-/// falling back to an actually filesystem copy if necessary.
-pub fn copy(src: &Path, dst: &Path) {
-    let res = fs::hard_link(src, dst);
-    let res = res.or_else(|_| fs::copy(src, dst).map(|_| ()));
-    if let Err(e) = res {
-        panic!("failed to copy `{}` to `{}`: {}", src.display(),
-               dst.display(), e)
-    }
-}
-
-/// Copies the `src` directory recursively to `dst`. Both are assumed to exist
-/// when this function is called.
-pub fn cp_r(src: &Path, dst: &Path) {
-    for f in t!(fs::read_dir(src)) {
-        let f = t!(f);
-        let path = f.path();
-        let name = path.file_name().unwrap();
-        let dst = dst.join(name);
-        if t!(f.file_type()).is_dir() {
-            let _ = fs::remove_dir_all(&dst);
-            t!(fs::create_dir(&dst));
-            cp_r(&path, &dst);
-        } else {
-            let _ = fs::remove_file(&dst);
-            copy(&path, &dst);
-        }
-    }
-}
-
-/// Given an executable called `name`, return the filename for the
-/// executable for a particular target.
-pub fn exe(name: &str, target: &str) -> String {
-    if target.contains("windows") {
-        format!("{}.exe", name)
-    } else {
-        name.to_string()
-    }
-}
-
-/// Returns whether the file name given looks like a dynamic library.
-pub fn is_dylib(name: &str) -> bool {
-    name.ends_with(".dylib") || name.ends_with(".so") || name.ends_with(".dll")
-}
-
-/// Returns the corresponding relative library directory that the compiler's
-/// dylibs will be found in.
-pub fn libdir(target: &str) -> &'static str {
-    if target.contains("windows") {"bin"} else {"lib"}
-}
-
-/// Adds a list of lookup paths to `cmd`'s dynamic library lookup path.
-pub fn add_lib_path(path: Vec<PathBuf>, cmd: &mut Command) {
-    let mut list = dylib_path();
-    for path in path {
-        list.insert(0, path);
-    }
-    cmd.env(dylib_path_var(), t!(env::join_paths(list)));
-}
-
-/// Returns whether `dst` is up to date given that the file or files in `src`
-/// are used to generate it.
-///
-/// Uses last-modified time checks to verify this.
-pub fn up_to_date(src: &Path, dst: &Path) -> bool {
-    let threshold = mtime(dst);
-    let meta = t!(fs::metadata(src));
-    if meta.is_dir() {
-        dir_up_to_date(src, &threshold)
-    } else {
-        FileTime::from_last_modification_time(&meta) <= threshold
-    }
-}
-
-fn dir_up_to_date(src: &Path, threshold: &FileTime) -> bool {
-    t!(fs::read_dir(src)).map(|e| t!(e)).all(|e| {
-        let meta = t!(e.metadata());
-        if meta.is_dir() {
-            dir_up_to_date(&e.path(), threshold)
-        } else {
-            FileTime::from_last_modification_time(&meta) < *threshold
-        }
-    })
-}
diff --git a/src/bootstrap/cc.rs b/src/bootstrap/cc.rs
new file mode 100644 (file)
index 0000000..e2bde4a
--- /dev/null
@@ -0,0 +1,124 @@
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! C-compiler probing and detection.
+//!
+//! This module will fill out the `cc` and `cxx` maps of `Build` by looking for
+//! C and C++ compilers for each target configured. A compiler is found through
+//! a number of vectors (in order of precedence)
+//!
+//! 1. Configuration via `target.$target.cc` in `config.toml`.
+//! 2. Configuration via `target.$target.android-ndk` in `config.toml`, if
+//!    applicable
+//! 3. Special logic to probe on OpenBSD
+//! 4. The `CC_$target` environment variable.
+//! 5. The `CC` environment variable.
+//! 6. "cc"
+//!
+//! Some of this logic is implemented here, but much of it is farmed out to the
+//! `gcc` crate itself, so we end up having the same fallbacks as there.
+//! Similar logic is then used to find a C++ compiler, just some s/cc/c++/ is
+//! used.
+//!
+//! It is intended that after this module has run no C/C++ compiler will
+//! ever be probed for. Instead the compilers found here will be used for
+//! everything.
+
+use std::process::Command;
+
+use build_helper::{cc2ar, output};
+use gcc;
+
+use Build;
+use config::Target;
+
+pub fn find(build: &mut Build) {
+    // For all targets we're going to need a C compiler for building some shims
+    // and such as well as for being a linker for Rust code.
+    for target in build.config.target.iter() {
+        let mut cfg = gcc::Config::new();
+        cfg.cargo_metadata(false).opt_level(0).debug(false)
+           .target(target).host(&build.config.build);
+
+        let config = build.config.target_config.get(target);
+        if let Some(cc) = config.and_then(|c| c.cc.as_ref()) {
+            cfg.compiler(cc);
+        } else {
+            set_compiler(&mut cfg, "gcc", target, config);
+        }
+
+        let compiler = cfg.get_compiler();
+        let ar = cc2ar(compiler.path(), target);
+        build.verbose(&format!("CC_{} = {:?}", target, compiler.path()));
+        if let Some(ref ar) = ar {
+            build.verbose(&format!("AR_{} = {:?}", target, ar));
+        }
+        build.cc.insert(target.to_string(), (compiler, ar));
+    }
+
+    // For all host triples we need to find a C++ compiler as well
+    for host in build.config.host.iter() {
+        let mut cfg = gcc::Config::new();
+        cfg.cargo_metadata(false).opt_level(0).debug(false).cpp(true)
+           .target(host).host(&build.config.build);
+        let config = build.config.target_config.get(host);
+        if let Some(cxx) = config.and_then(|c| c.cxx.as_ref()) {
+            cfg.compiler(cxx);
+        } else {
+            set_compiler(&mut cfg, "g++", host, config);
+        }
+        let compiler = cfg.get_compiler();
+        build.verbose(&format!("CXX_{} = {:?}", host, compiler.path()));
+        build.cxx.insert(host.to_string(), compiler);
+    }
+}
+
+fn set_compiler(cfg: &mut gcc::Config,
+                gnu_compiler: &str,
+                target: &str,
+                config: Option<&Target>) {
+    match target {
+        // When compiling for android we may have the NDK configured in the
+        // config.toml in which case we look there. Otherwise the default
+        // compiler already takes into account the triple in question.
+        t if t.contains("android") => {
+            if let Some(ndk) = config.and_then(|c| c.ndk.as_ref()) {
+                let target = target.replace("armv7", "arm");
+                let compiler = format!("{}-{}", target, gnu_compiler);
+                cfg.compiler(ndk.join("bin").join(compiler));
+            }
+        }
+
+        // The default gcc version from OpenBSD may be too old, try using egcc,
+        // which is a gcc version from ports, if this is the case.
+        t if t.contains("openbsd") => {
+            let c = cfg.get_compiler();
+            if !c.path().ends_with(gnu_compiler) {
+                return
+            }
+
+            let output = output(c.to_command().arg("--version"));
+            let i = match output.find(" 4.") {
+                Some(i) => i,
+                None => return,
+            };
+            match output[i + 3..].chars().next().unwrap() {
+                '0' ... '6' => {}
+                _ => return,
+            }
+            let alternative = format!("e{}", gnu_compiler);
+            if Command::new(&alternative).output().is_ok() {
+                cfg.compiler(alternative);
+            }
+        }
+
+        _ => {}
+    }
+}
diff --git a/src/bootstrap/channel.rs b/src/bootstrap/channel.rs
new file mode 100644 (file)
index 0000000..879c383
--- /dev/null
@@ -0,0 +1,110 @@
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Build configuration for Rust's release channels.
+//!
+//! Implements the stable/beta/nightly channel distinctions by setting various
+//! flags like the `unstable_features`, calculating variables like `release` and
+//! `package_vers`, and otherwise indicating to the compiler what it should
+//! print out as part of its version information.
+
+use std::fs::{self, File};
+use std::io::prelude::*;
+use std::process::Command;
+
+use build_helper::output;
+use md5;
+
+use Build;
+
+pub fn collect(build: &mut Build) {
+    // Currently the canonical source for the release number (e.g. 1.10.0) and
+    // the prerelease version (e.g. `.1`) is in `mk/main.mk`. We "parse" that
+    // here to learn about those numbers.
+    let mut main_mk = String::new();
+    t!(t!(File::open(build.src.join("mk/main.mk"))).read_to_string(&mut main_mk));
+    let mut release_num = "";
+    let mut prerelease_version = "";
+    for line in main_mk.lines() {
+        if line.starts_with("CFG_RELEASE_NUM") {
+            release_num = line.split('=').skip(1).next().unwrap().trim();
+        }
+        if line.starts_with("CFG_PRERELEASE_VERSION") {
+            prerelease_version = line.split('=').skip(1).next().unwrap().trim();
+        }
+    }
+
+    // Depending on the channel, passed in `./configure --release-channel`,
+    // determine various properties of the build.
+    match &build.config.channel[..] {
+        "stable" => {
+            build.release = release_num.to_string();
+            build.package_vers = build.release.clone();
+            build.unstable_features = false;
+        }
+        "beta" => {
+            build.release = format!("{}-beta{}", release_num,
+                                   prerelease_version);
+            build.package_vers = "beta".to_string();
+            build.unstable_features = false;
+        }
+        "nightly" => {
+            build.release = format!("{}-nightly", release_num);
+            build.package_vers = "nightly".to_string();
+            build.unstable_features = true;
+        }
+        _ => {
+            build.release = format!("{}-dev", release_num);
+            build.package_vers = build.release.clone();
+            build.unstable_features = true;
+        }
+    }
+    build.version = build.release.clone();
+
+    // If we have a git directory, add in some various SHA information of what
+    // commit this compiler was compiled from.
+    if fs::metadata(build.src.join(".git")).is_ok() {
+        let ver_date = output(Command::new("git").current_dir(&build.src)
+                                      .arg("log").arg("-1")
+                                      .arg("--date=short")
+                                      .arg("--pretty=format:%cd"));
+        let ver_hash = output(Command::new("git").current_dir(&build.src)
+                                      .arg("rev-parse").arg("HEAD"));
+        let short_ver_hash = output(Command::new("git")
+                                            .current_dir(&build.src)
+                                            .arg("rev-parse")
+                                            .arg("--short=9")
+                                            .arg("HEAD"));
+        let ver_date = ver_date.trim().to_string();
+        let ver_hash = ver_hash.trim().to_string();
+        let short_ver_hash = short_ver_hash.trim().to_string();
+        build.version.push_str(&format!(" ({} {})", short_ver_hash,
+                                       ver_date));
+        build.ver_date = Some(ver_date.to_string());
+        build.ver_hash = Some(ver_hash);
+        build.short_ver_hash = Some(short_ver_hash);
+    }
+
+    // Calculate this compiler's bootstrap key, which is currently defined as
+    // the first 8 characters of the md5 of the release string.
+    let key = md5::compute(build.release.as_bytes());
+    build.bootstrap_key = format!("{:02x}{:02x}{:02x}{:02x}",
+                                  key[0], key[1], key[2], key[3]);
+
+    // Slurp up the stage0 bootstrap key as we're bootstrapping from an
+    // otherwise stable compiler.
+    let mut s = String::new();
+    t!(t!(File::open(build.src.join("src/stage0.txt"))).read_to_string(&mut s));
+    if let Some(line) = s.lines().find(|l| l.starts_with("rustc_key")) {
+        if let Some(key) = line.split(": ").nth(1) {
+            build.bootstrap_key_stage0 = key.to_string();
+        }
+    }
+}
diff --git a/src/bootstrap/check.rs b/src/bootstrap/check.rs
new file mode 100644 (file)
index 0000000..3d8b143
--- /dev/null
@@ -0,0 +1,413 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Implementation of the various `check-*` targets of the build system.
+//!
+//! This file implements the various regression test suites that we execute on
+//! our CI.
+
+use std::env;
+use std::fs::{self, File};
+use std::io::prelude::*;
+use std::path::{PathBuf, Path};
+use std::process::Command;
+
+use build_helper::output;
+
+use {Build, Compiler, Mode};
+use util::{self, dylib_path, dylib_path_var};
+
+const ADB_TEST_DIR: &'static str = "/data/tmp";
+
+/// Runs the `linkchecker` tool as compiled in `stage` by the `host` compiler.
+///
+/// This tool in `src/tools` will verify the validity of all our links in the
+/// documentation to ensure we don't have a bunch of dead ones.
+pub fn linkcheck(build: &Build, stage: u32, host: &str) {
+    println!("Linkcheck stage{} ({})", stage, host);
+    let compiler = Compiler::new(stage, host);
+    build.run(build.tool_cmd(&compiler, "linkchecker")
+                   .arg(build.out.join(host).join("doc")));
+}
+
+/// Runs the `cargotest` tool as compiled in `stage` by the `host` compiler.
+///
+/// This tool in `src/tools` will check out a few Rust projects and run `cargo
+/// test` to ensure that we don't regress the test suites there.
+pub fn cargotest(build: &Build, stage: u32, host: &str) {
+    let ref compiler = Compiler::new(stage, host);
+
+    // Configure PATH to find the right rustc. NB. we have to use PATH
+    // and not RUSTC because the Cargo test suite has tests that will
+    // fail if rustc is not spelled `rustc`.
+    let path = build.sysroot(compiler).join("bin");
+    let old_path = ::std::env::var("PATH").expect("");
+    let sep = if cfg!(windows) { ";" } else {":" };
+    let ref newpath = format!("{}{}{}", path.display(), sep, old_path);
+
+    // Note that this is a short, cryptic, and not scoped directory name. This
+    // is currently to minimize the length of path on Windows where we otherwise
+    // quickly run into path name limit constraints.
+    let out_dir = build.out.join("ct");
+    t!(fs::create_dir_all(&out_dir));
+
+    build.run(build.tool_cmd(compiler, "cargotest")
+                   .env("PATH", newpath)
+                   .arg(&build.cargo)
+                   .arg(&out_dir));
+}
+
+/// Runs the `tidy` tool as compiled in `stage` by the `host` compiler.
+///
+/// This tool in `src/tools` checks up on various bits and pieces of style and
+/// otherwise just implements a few lint-like checks that are specific to the
+/// compiler itself.
+pub fn tidy(build: &Build, stage: u32, host: &str) {
+    println!("tidy check stage{} ({})", stage, host);
+    let compiler = Compiler::new(stage, host);
+    build.run(build.tool_cmd(&compiler, "tidy")
+                   .arg(build.src.join("src")));
+}
+
+fn testdir(build: &Build, host: &str) -> PathBuf {
+    build.out.join(host).join("test")
+}
+
+/// Executes the `compiletest` tool to run a suite of tests.
+///
+/// Compiles all tests with `compiler` for `target` with the specified
+/// compiletest `mode` and `suite` arguments. For example `mode` can be
+/// "run-pass" or `suite` can be something like `debuginfo`.
+pub fn compiletest(build: &Build,
+                   compiler: &Compiler,
+                   target: &str,
+                   mode: &str,
+                   suite: &str) {
+    println!("Check compiletest {} ({} -> {})", suite, compiler.host, target);
+    let mut cmd = build.tool_cmd(compiler, "compiletest");
+
+    // compiletest currently has... a lot of arguments, so let's just pass all
+    // of them!
+
+    cmd.arg("--compile-lib-path").arg(build.rustc_libdir(compiler));
+    cmd.arg("--run-lib-path").arg(build.sysroot_libdir(compiler, target));
+    cmd.arg("--rustc-path").arg(build.compiler_path(compiler));
+    cmd.arg("--rustdoc-path").arg(build.rustdoc(compiler));
+    cmd.arg("--src-base").arg(build.src.join("src/test").join(suite));
+    cmd.arg("--build-base").arg(testdir(build, compiler.host).join(suite));
+    cmd.arg("--stage-id").arg(format!("stage{}-{}", compiler.stage, target));
+    cmd.arg("--mode").arg(mode);
+    cmd.arg("--target").arg(target);
+    cmd.arg("--host").arg(compiler.host);
+    cmd.arg("--llvm-filecheck").arg(build.llvm_filecheck(&build.config.build));
+
+    let mut flags = vec!["-Crpath".to_string()];
+    if build.config.rust_optimize_tests {
+        flags.push("-O".to_string());
+    }
+    if build.config.rust_debuginfo_tests {
+        flags.push("-g".to_string());
+    }
+
+    let mut hostflags = build.rustc_flags(&compiler.host);
+    hostflags.extend(flags.clone());
+    cmd.arg("--host-rustcflags").arg(hostflags.join(" "));
+
+    let mut targetflags = build.rustc_flags(&target);
+    targetflags.extend(flags);
+    targetflags.push(format!("-Lnative={}",
+                             build.test_helpers_out(target).display()));
+    cmd.arg("--target-rustcflags").arg(targetflags.join(" "));
+
+    // FIXME: CFG_PYTHON should probably be detected more robustly elsewhere
+    let python_default = "python";
+    cmd.arg("--docck-python").arg(python_default);
+
+    if build.config.build.ends_with("apple-darwin") {
+        // Force /usr/bin/python on OSX for LLDB tests because we're loading the
+        // LLDB plugin's compiled module which only works with the system python
+        // (namely not Homebrew-installed python)
+        cmd.arg("--lldb-python").arg("/usr/bin/python");
+    } else {
+        cmd.arg("--lldb-python").arg(python_default);
+    }
+
+    if let Some(ref vers) = build.gdb_version {
+        cmd.arg("--gdb-version").arg(vers);
+    }
+    if let Some(ref vers) = build.lldb_version {
+        cmd.arg("--lldb-version").arg(vers);
+    }
+    if let Some(ref dir) = build.lldb_python_dir {
+        cmd.arg("--lldb-python-dir").arg(dir);
+    }
+
+    cmd.args(&build.flags.args);
+
+    if build.config.verbose || build.flags.verbose {
+        cmd.arg("--verbose");
+    }
+
+    // Only pass correct values for these flags for the `run-make` suite as it
+    // requires that a C++ compiler was configured which isn't always the case.
+    if suite == "run-make" {
+        let llvm_config = build.llvm_config(target);
+        let llvm_components = output(Command::new(&llvm_config).arg("--components"));
+        let llvm_cxxflags = output(Command::new(&llvm_config).arg("--cxxflags"));
+        cmd.arg("--cc").arg(build.cc(target))
+           .arg("--cxx").arg(build.cxx(target))
+           .arg("--cflags").arg(build.cflags(target).join(" "))
+           .arg("--llvm-components").arg(llvm_components.trim())
+           .arg("--llvm-cxxflags").arg(llvm_cxxflags.trim());
+    } else {
+        cmd.arg("--cc").arg("")
+           .arg("--cxx").arg("")
+           .arg("--cflags").arg("")
+           .arg("--llvm-components").arg("")
+           .arg("--llvm-cxxflags").arg("");
+    }
+
+    // Running a C compiler on MSVC requires a few env vars to be set, to be
+    // sure to set them here.
+    if target.contains("msvc") {
+        for &(ref k, ref v) in build.cc[target].0.env() {
+            if k != "PATH" {
+                cmd.env(k, v);
+            }
+        }
+    }
+    build.add_bootstrap_key(compiler, &mut cmd);
+
+    cmd.arg("--adb-path").arg("adb");
+    cmd.arg("--adb-test-dir").arg(ADB_TEST_DIR);
+    if target.contains("android") {
+        // Assume that cc for this target comes from the android sysroot
+        cmd.arg("--android-cross-path")
+           .arg(build.cc(target).parent().unwrap().parent().unwrap());
+    } else {
+        cmd.arg("--android-cross-path").arg("");
+    }
+
+    build.run(&mut cmd);
+}
+
+/// Run `rustdoc --test` for all documentation in `src/doc`.
+///
+/// This will run all tests in our markdown documentation (e.g. the book)
+/// located in `src/doc`. The `rustdoc` that's run is the one that sits next to
+/// `compiler`.
+pub fn docs(build: &Build, compiler: &Compiler) {
+    // Do a breadth-first traversal of the `src/doc` directory and just run
+    // tests for all files that end in `*.md`
+    let mut stack = vec![build.src.join("src/doc")];
+
+    while let Some(p) = stack.pop() {
+        if p.is_dir() {
+            stack.extend(t!(p.read_dir()).map(|p| t!(p).path()));
+            continue
+        }
+
+        if p.extension().and_then(|s| s.to_str()) != Some("md") {
+            continue
+        }
+
+        println!("doc tests for: {}", p.display());
+        markdown_test(build, compiler, &p);
+    }
+}
+
+/// Run the error index generator tool to execute the tests located in the error
+/// index.
+///
+/// The `error_index_generator` tool lives in `src/tools` and is used to
+/// generate a markdown file from the error indexes of the code base which is
+/// then passed to `rustdoc --test`.
+pub fn error_index(build: &Build, compiler: &Compiler) {
+    println!("Testing error-index stage{}", compiler.stage);
+
+    let output = testdir(build, compiler.host).join("error-index.md");
+    build.run(build.tool_cmd(compiler, "error_index_generator")
+                   .arg("markdown")
+                   .arg(&output)
+                   .env("CFG_BUILD", &build.config.build));
+
+    markdown_test(build, compiler, &output);
+}
+
+fn markdown_test(build: &Build, compiler: &Compiler, markdown: &Path) {
+    let mut cmd = Command::new(build.rustdoc(compiler));
+    build.add_rustc_lib_path(compiler, &mut cmd);
+    cmd.arg("--test");
+    cmd.arg(markdown);
+    cmd.arg("--test-args").arg(build.flags.args.join(" "));
+    build.run(&mut cmd);
+}
+
+/// Run all unit tests plus documentation tests for an entire crate DAG defined
+/// by a `Cargo.toml`
+///
+/// This is what runs tests for crates like the standard library, compiler, etc.
+/// It essentially is the driver for running `cargo test`.
+///
+/// Currently this runs all tests for a DAG by passing a bunch of `-p foo`
+/// arguments, and those arguments are discovered from `Cargo.lock`.
+pub fn krate(build: &Build,
+             compiler: &Compiler,
+             target: &str,
+             mode: Mode) {
+    let (name, path, features) = match mode {
+        Mode::Libstd => ("libstd", "src/rustc/std_shim", build.std_features()),
+        Mode::Libtest => ("libtest", "src/rustc/test_shim", String::new()),
+        Mode::Librustc => ("librustc", "src/rustc", build.rustc_features()),
+        _ => panic!("can only test libraries"),
+    };
+    println!("Testing {} stage{} ({} -> {})", name, compiler.stage,
+             compiler.host, target);
+
+    // Build up the base `cargo test` command.
+    let mut cargo = build.cargo(compiler, mode, target, "test");
+    cargo.arg("--manifest-path")
+         .arg(build.src.join(path).join("Cargo.toml"))
+         .arg("--features").arg(features);
+
+    // Generate a list of `-p` arguments to pass to the `cargo test` invocation
+    // by crawling the corresponding Cargo.lock file.
+    let lockfile = build.src.join(path).join("Cargo.lock");
+    let mut contents = String::new();
+    t!(t!(File::open(&lockfile)).read_to_string(&mut contents));
+    let mut lines = contents.lines();
+    while let Some(line) = lines.next() {
+        let prefix = "name = \"";
+        if !line.starts_with(prefix) {
+            continue
+        }
+        lines.next(); // skip `version = ...`
+
+        // skip crates.io or otherwise non-path crates
+        if let Some(line) = lines.next() {
+            if line.starts_with("source") {
+                continue
+            }
+        }
+
+        let crate_name = &line[prefix.len()..line.len() - 1];
+
+        // Right now jemalloc is our only target-specific crate in the sense
+        // that it's not present on all platforms. Custom skip it here for now,
+        // but if we add more this probably wants to get more generalized.
+        if crate_name.contains("jemalloc") {
+            continue
+        }
+
+        cargo.arg("-p").arg(crate_name);
+    }
+
+    // The tests are going to run with the *target* libraries, so we need to
+    // ensure that those libraries show up in the LD_LIBRARY_PATH equivalent.
+    //
+    // Note that to run the compiler we need to run with the *host* libraries,
+    // but our wrapper scripts arrange for that to be the case anyway.
+    let mut dylib_path = dylib_path();
+    dylib_path.insert(0, build.sysroot_libdir(compiler, target));
+    cargo.env(dylib_path_var(), env::join_paths(&dylib_path).unwrap());
+
+    if target.contains("android") {
+        build.run(cargo.arg("--no-run"));
+        krate_android(build, compiler, target, mode);
+    } else {
+        cargo.args(&build.flags.args);
+        build.run(&mut cargo);
+    }
+}
+
+fn krate_android(build: &Build,
+                 compiler: &Compiler,
+                 target: &str,
+                 mode: Mode) {
+    let mut tests = Vec::new();
+    let out_dir = build.cargo_out(compiler, mode, target);
+    find_tests(&out_dir, target, &mut tests);
+    find_tests(&out_dir.join("deps"), target, &mut tests);
+
+    for test in tests {
+        build.run(Command::new("adb").arg("push").arg(&test).arg(ADB_TEST_DIR));
+
+        let test_file_name = test.file_name().unwrap().to_string_lossy();
+        let log = format!("{}/check-stage{}-T-{}-H-{}-{}.log",
+                          ADB_TEST_DIR,
+                          compiler.stage,
+                          target,
+                          compiler.host,
+                          test_file_name);
+        let program = format!("(cd {dir}; \
+                                LD_LIBRARY_PATH=./{target} ./{test} \
+                                    --logfile {log} \
+                                    {args})",
+                              dir = ADB_TEST_DIR,
+                              target = target,
+                              test = test_file_name,
+                              log = log,
+                              args = build.flags.args.join(" "));
+
+        let output = output(Command::new("adb").arg("shell").arg(&program));
+        println!("{}", output);
+        build.run(Command::new("adb")
+                          .arg("pull")
+                          .arg(&log)
+                          .arg(build.out.join("tmp")));
+        build.run(Command::new("adb").arg("shell").arg("rm").arg(&log));
+        if !output.contains("result: ok") {
+            panic!("some tests failed");
+        }
+    }
+}
+
+fn find_tests(dir: &Path,
+              target: &str,
+              dst: &mut Vec<PathBuf>) {
+    for e in t!(dir.read_dir()).map(|e| t!(e)) {
+        let file_type = t!(e.file_type());
+        if !file_type.is_file() {
+            continue
+        }
+        let filename = e.file_name().into_string().unwrap();
+        if (target.contains("windows") && filename.ends_with(".exe")) ||
+           (!target.contains("windows") && !filename.contains(".")) {
+            dst.push(e.path());
+        }
+    }
+}
+
+pub fn android_copy_libs(build: &Build,
+                         compiler: &Compiler,
+                         target: &str) {
+    println!("Android copy libs to emulator ({})", target);
+    build.run(Command::new("adb").arg("remount"));
+    build.run(Command::new("adb").args(&["shell", "rm", "-r", ADB_TEST_DIR]));
+    build.run(Command::new("adb").args(&["shell", "mkdir", ADB_TEST_DIR]));
+    build.run(Command::new("adb")
+                      .arg("push")
+                      .arg(build.src.join("src/etc/adb_run_wrapper.sh"))
+                      .arg(ADB_TEST_DIR));
+
+    let target_dir = format!("{}/{}", ADB_TEST_DIR, target);
+    build.run(Command::new("adb").args(&["shell", "mkdir", &target_dir[..]]));
+
+    for f in t!(build.sysroot_libdir(compiler, target).read_dir()) {
+        let f = t!(f);
+        let name = f.file_name().into_string().unwrap();
+        if util::is_dylib(&name) {
+            build.run(Command::new("adb")
+                              .arg("push")
+                              .arg(f.path())
+                              .arg(&target_dir));
+        }
+    }
+}
diff --git a/src/bootstrap/clean.rs b/src/bootstrap/clean.rs
new file mode 100644 (file)
index 0000000..a466e2e
--- /dev/null
@@ -0,0 +1,49 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Implementation of `make clean` in rustbuild.
+//!
+//! Responsible for cleaning out a build directory of all old and stale
+//! artifacts to prepare for a fresh build. Currently doesn't remove the
+//! `build/cache` directory (download cache) or the `build/$target/llvm`
+//! directory as we want that cached between builds.
+
+use std::fs;
+use std::path::Path;
+
+use Build;
+
+pub fn clean(build: &Build) {
+    rm_rf(build, "tmp".as_ref());
+    rm_rf(build, &build.out.join("tmp"));
+
+    for host in build.config.host.iter() {
+
+        let out = build.out.join(host);
+
+        rm_rf(build, &out.join("compiler-rt"));
+        rm_rf(build, &out.join("doc"));
+
+        for stage in 0..4 {
+            rm_rf(build, &out.join(format!("stage{}", stage)));
+            rm_rf(build, &out.join(format!("stage{}-std", stage)));
+            rm_rf(build, &out.join(format!("stage{}-rustc", stage)));
+            rm_rf(build, &out.join(format!("stage{}-tools", stage)));
+            rm_rf(build, &out.join(format!("stage{}-test", stage)));
+        }
+    }
+}
+
+fn rm_rf(build: &Build, path: &Path) {
+    if path.exists() {
+        build.verbose(&format!("removing `{}`", path.display()));
+        t!(fs::remove_dir_all(path));
+    }
+}
diff --git a/src/bootstrap/compile.rs b/src/bootstrap/compile.rs
new file mode 100644 (file)
index 0000000..8ec9c7f
--- /dev/null
@@ -0,0 +1,360 @@
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Implementation of compiling various phases of the compiler and standard
+//! library.
+//!
+//! This module contains some of the real meat in the rustbuild build system
+//! which is where Cargo is used to compile the standard library, libtest, and
+//! compiler. This module is also responsible for assembling the sysroot as it
+//! goes along from the output of the previous stage.
+
+use std::collections::HashMap;
+use std::fs;
+use std::path::{Path, PathBuf};
+use std::process::Command;
+
+use build_helper::output;
+
+use util::{exe, staticlib, libdir, mtime, is_dylib, copy};
+use {Build, Compiler, Mode};
+
+/// Build the standard library.
+///
+/// This will build the standard library for a particular stage of the build
+/// using the `compiler` targeting the `target` architecture. The artifacts
+/// created will also be linked into the sysroot directory.
+pub fn std<'a>(build: &'a Build, target: &str, compiler: &Compiler<'a>) {
+    println!("Building stage{} std artifacts ({} -> {})", compiler.stage,
+             compiler.host, target);
+
+    // Move compiler-rt into place as it'll be required by the compiler when
+    // building the standard library to link the dylib of libstd
+    let libdir = build.sysroot_libdir(compiler, target);
+    let _ = fs::remove_dir_all(&libdir);
+    t!(fs::create_dir_all(&libdir));
+    copy(&build.compiler_rt_built.borrow()[target],
+         &libdir.join(staticlib("compiler-rt", target)));
+
+    // Some platforms have startup objects that may be required to produce the
+    // libstd dynamic library, for example.
+    build_startup_objects(build, target, &libdir);
+
+    let out_dir = build.cargo_out(compiler, Mode::Libstd, target);
+    build.clear_if_dirty(&out_dir, &build.compiler_path(compiler));
+    let mut cargo = build.cargo(compiler, Mode::Libstd, target, "build");
+    cargo.arg("--features").arg(build.std_features())
+         .arg("--manifest-path")
+         .arg(build.src.join("src/rustc/std_shim/Cargo.toml"));
+
+    if let Some(target) = build.config.target_config.get(target) {
+        if let Some(ref jemalloc) = target.jemalloc {
+            cargo.env("JEMALLOC_OVERRIDE", jemalloc);
+        }
+    }
+    if let Some(ref p) = build.config.musl_root {
+        if target.contains("musl") {
+            cargo.env("MUSL_ROOT", p);
+        }
+    }
+
+    build.run(&mut cargo);
+    std_link(build, target, compiler, compiler.host);
+}
+
+/// Link all libstd rlibs/dylibs into the sysroot location.
+///
+/// Links those artifacts generated in the given `stage` for `target` produced
+/// by `compiler` into `host`'s sysroot.
+pub fn std_link(build: &Build,
+                target: &str,
+                compiler: &Compiler,
+                host: &str) {
+    let target_compiler = Compiler::new(compiler.stage, host);
+    let libdir = build.sysroot_libdir(&target_compiler, target);
+    let out_dir = build.cargo_out(compiler, Mode::Libstd, target);
+
+    // If we're linking one compiler host's output into another, then we weren't
+    // called from the `std` method above. In that case we clean out what's
+    // already there and then also link compiler-rt into place.
+    if host != compiler.host {
+        let _ = fs::remove_dir_all(&libdir);
+        t!(fs::create_dir_all(&libdir));
+        copy(&build.compiler_rt_built.borrow()[target],
+             &libdir.join(staticlib("compiler-rt", target)));
+    }
+    add_to_sysroot(&out_dir, &libdir);
+
+    if target.contains("musl") &&
+       (target.contains("x86_64") || target.contains("i686")) {
+        copy_third_party_objects(build, target, &libdir);
+    }
+}
+
+/// Copies the crt(1,i,n).o startup objects
+///
+/// Only required for musl targets that statically link to libc
+fn copy_third_party_objects(build: &Build, target: &str, into: &Path) {
+    for &obj in &["crt1.o", "crti.o", "crtn.o"] {
+        copy(&compiler_file(build.cc(target), obj), &into.join(obj));
+    }
+}
+
+/// Build and prepare startup objects like rsbegin.o and rsend.o
+///
+/// These are primarily used on Windows right now for linking executables/dlls.
+/// They don't require any library support as they're just plain old object
+/// files, so we just use the nightly snapshot compiler to always build them (as
+/// no other compilers are guaranteed to be available).
+fn build_startup_objects(build: &Build, target: &str, into: &Path) {
+    if !target.contains("pc-windows-gnu") {
+        return
+    }
+    let compiler = Compiler::new(0, &build.config.build);
+    let compiler = build.compiler_path(&compiler);
+
+    for file in t!(fs::read_dir(build.src.join("src/rtstartup"))) {
+        let file = t!(file);
+        build.run(Command::new(&compiler)
+                          .arg("--emit=obj")
+                          .arg("--out-dir").arg(into)
+                          .arg(file.path()));
+    }
+
+    for obj in ["crt2.o", "dllcrt2.o"].iter() {
+        copy(&compiler_file(build.cc(target), obj), &into.join(obj));
+    }
+}
+
+/// Build libtest.
+///
+/// This will build libtest and supporting libraries for a particular stage of
+/// the build using the `compiler` targeting the `target` architecture. The
+/// artifacts created will also be linked into the sysroot directory.
+pub fn test<'a>(build: &'a Build, target: &str, compiler: &Compiler<'a>) {
+    println!("Building stage{} test artifacts ({} -> {})", compiler.stage,
+             compiler.host, target);
+    let out_dir = build.cargo_out(compiler, Mode::Libtest, target);
+    build.clear_if_dirty(&out_dir, &libstd_shim(build, compiler, target));
+    let mut cargo = build.cargo(compiler, Mode::Libtest, target, "build");
+    cargo.arg("--manifest-path")
+         .arg(build.src.join("src/rustc/test_shim/Cargo.toml"));
+    build.run(&mut cargo);
+    test_link(build, target, compiler, compiler.host);
+}
+
+/// Link all libtest rlibs/dylibs into the sysroot location.
+///
+/// Links those artifacts generated in the given `stage` for `target` produced
+/// by `compiler` into `host`'s sysroot.
+pub fn test_link(build: &Build,
+                 target: &str,
+                 compiler: &Compiler,
+                 host: &str) {
+    let target_compiler = Compiler::new(compiler.stage, host);
+    let libdir = build.sysroot_libdir(&target_compiler, target);
+    let out_dir = build.cargo_out(compiler, Mode::Libtest, target);
+    add_to_sysroot(&out_dir, &libdir);
+}
+
+/// Build the compiler.
+///
+/// This will build the compiler for a particular stage of the build using
+/// the `compiler` targeting the `target` architecture. The artifacts
+/// created will also be linked into the sysroot directory.
+pub fn rustc<'a>(build: &'a Build, target: &str, compiler: &Compiler<'a>) {
+    println!("Building stage{} compiler artifacts ({} -> {})",
+             compiler.stage, compiler.host, target);
+
+    let out_dir = build.cargo_out(compiler, Mode::Librustc, target);
+    build.clear_if_dirty(&out_dir, &libtest_shim(build, compiler, target));
+
+    let mut cargo = build.cargo(compiler, Mode::Librustc, target, "build");
+    cargo.arg("--features").arg(build.rustc_features())
+         .arg("--manifest-path")
+         .arg(build.src.join("src/rustc/Cargo.toml"));
+
+    // Set some configuration variables picked up by build scripts and
+    // the compiler alike
+    cargo.env("CFG_RELEASE", &build.release)
+         .env("CFG_RELEASE_CHANNEL", &build.config.channel)
+         .env("CFG_VERSION", &build.version)
+         .env("CFG_BOOTSTRAP_KEY", &build.bootstrap_key)
+         .env("CFG_PREFIX", build.config.prefix.clone().unwrap_or(String::new()))
+         .env("CFG_LIBDIR_RELATIVE", "lib");
+
+    if let Some(ref ver_date) = build.ver_date {
+        cargo.env("CFG_VER_DATE", ver_date);
+    }
+    if let Some(ref ver_hash) = build.ver_hash {
+        cargo.env("CFG_VER_HASH", ver_hash);
+    }
+    if !build.unstable_features {
+        cargo.env("CFG_DISABLE_UNSTABLE_FEATURES", "1");
+    }
+    cargo.env("LLVM_CONFIG", build.llvm_config(target));
+    if build.config.llvm_static_stdcpp {
+        cargo.env("LLVM_STATIC_STDCPP",
+                  compiler_file(build.cxx(target), "libstdc++.a"));
+    }
+    if let Some(ref s) = build.config.rustc_default_linker {
+        cargo.env("CFG_DEFAULT_LINKER", s);
+    }
+    if let Some(ref s) = build.config.rustc_default_ar {
+        cargo.env("CFG_DEFAULT_AR", s);
+    }
+    build.run(&mut cargo);
+
+    rustc_link(build, target, compiler, compiler.host);
+}
+
+/// Link all librustc rlibs/dylibs into the sysroot location.
+///
+/// Links those artifacts generated in the given `stage` for `target` produced
+/// by `compiler` into `host`'s sysroot.
+pub fn rustc_link(build: &Build,
+                  target: &str,
+                  compiler: &Compiler,
+                  host: &str) {
+    let target_compiler = Compiler::new(compiler.stage, host);
+    let libdir = build.sysroot_libdir(&target_compiler, target);
+    let out_dir = build.cargo_out(compiler, Mode::Librustc, target);
+    add_to_sysroot(&out_dir, &libdir);
+}
+
+/// Cargo's output path for the standard library in a given stage, compiled
+/// by a particular compiler for the specified target.
+fn libstd_shim(build: &Build, compiler: &Compiler, target: &str) -> PathBuf {
+    build.cargo_out(compiler, Mode::Libstd, target).join("libstd_shim.rlib")
+}
+
+/// Cargo's output path for libtest in a given stage, compiled by a particular
+/// compiler for the specified target.
+fn libtest_shim(build: &Build, compiler: &Compiler, target: &str) -> PathBuf {
+    build.cargo_out(compiler, Mode::Libtest, target).join("libtest_shim.rlib")
+}
+
+fn compiler_file(compiler: &Path, file: &str) -> PathBuf {
+    let out = output(Command::new(compiler)
+                            .arg(format!("-print-file-name={}", file)));
+    PathBuf::from(out.trim())
+}
+
+/// Prepare a new compiler from the artifacts in `stage`
+///
+/// This will assemble a compiler in `build/$host/stage$stage`. The compiler
+/// must have been previously produced by the `stage - 1` build.config.build
+/// compiler.
+pub fn assemble_rustc(build: &Build, stage: u32, host: &str) {
+    assert!(stage > 0, "the stage0 compiler isn't assembled, it's downloaded");
+    // The compiler that we're assembling
+    let target_compiler = Compiler::new(stage, host);
+
+    // The compiler that compiled the compiler we're assembling
+    let build_compiler = Compiler::new(stage - 1, &build.config.build);
+
+    // Clear out old files
+    let sysroot = build.sysroot(&target_compiler);
+    let _ = fs::remove_dir_all(&sysroot);
+    t!(fs::create_dir_all(&sysroot));
+
+    // Link in all dylibs to the libdir
+    let sysroot_libdir = sysroot.join(libdir(host));
+    t!(fs::create_dir_all(&sysroot_libdir));
+    let src_libdir = build.sysroot_libdir(&build_compiler, host);
+    for f in t!(fs::read_dir(&src_libdir)).map(|f| t!(f)) {
+        let filename = f.file_name().into_string().unwrap();
+        if is_dylib(&filename) {
+            copy(&f.path(), &sysroot_libdir.join(&filename));
+        }
+    }
+
+    let out_dir = build.cargo_out(&build_compiler, Mode::Librustc, host);
+
+    // Link the compiler binary itself into place
+    let rustc = out_dir.join(exe("rustc", host));
+    let bindir = sysroot.join("bin");
+    t!(fs::create_dir_all(&bindir));
+    let compiler = build.compiler_path(&Compiler::new(stage, host));
+    let _ = fs::remove_file(&compiler);
+    copy(&rustc, &compiler);
+
+    // See if rustdoc exists to link it into place
+    let rustdoc = exe("rustdoc", host);
+    let rustdoc_src = out_dir.join(&rustdoc);
+    let rustdoc_dst = bindir.join(&rustdoc);
+    if fs::metadata(&rustdoc_src).is_ok() {
+        let _ = fs::remove_file(&rustdoc_dst);
+        copy(&rustdoc_src, &rustdoc_dst);
+    }
+}
+
+/// Link some files into a rustc sysroot.
+///
+/// For a particular stage this will link all of the contents of `out_dir`
+/// into the sysroot of the `host` compiler, assuming the artifacts are
+/// compiled for the specified `target`.
+fn add_to_sysroot(out_dir: &Path, sysroot_dst: &Path) {
+    // Collect the set of all files in the dependencies directory, keyed
+    // off the name of the library. We assume everything is of the form
+    // `foo-<hash>.{rlib,so,...}`, and there could be multiple different
+    // `<hash>` values for the same name (of old builds).
+    let mut map = HashMap::new();
+    for file in t!(fs::read_dir(out_dir.join("deps"))).map(|f| t!(f)) {
+        let filename = file.file_name().into_string().unwrap();
+
+        // We're only interested in linking rlibs + dylibs, other things like
+        // unit tests don't get linked in
+        if !filename.ends_with(".rlib") &&
+           !filename.ends_with(".lib") &&
+           !is_dylib(&filename) {
+            continue
+        }
+        let file = file.path();
+        let dash = filename.find("-").unwrap();
+        let key = (filename[..dash].to_string(),
+                   file.extension().unwrap().to_owned());
+        map.entry(key).or_insert(Vec::new())
+           .push(file.clone());
+    }
+
+    // For all hash values found, pick the most recent one to move into the
+    // sysroot, that should be the one we just built.
+    for (_, paths) in map {
+        let (_, path) = paths.iter().map(|path| {
+            (mtime(&path).seconds(), path)
+        }).max().unwrap();
+        copy(&path, &sysroot_dst.join(path.file_name().unwrap()));
+    }
+}
+
+/// Build a tool in `src/tools`
+///
+/// This will build the specified tool with the specified `host` compiler in
+/// `stage` into the normal cargo output directory.
+pub fn tool(build: &Build, stage: u32, host: &str, tool: &str) {
+    println!("Building stage{} tool {} ({})", stage, tool, host);
+
+    let compiler = Compiler::new(stage, host);
+
+    // FIXME: need to clear out previous tool and ideally deps, may require
+    //        isolating output directories or require a pseudo shim step to
+    //        clear out all the info.
+    //
+    //        Maybe when libstd is compiled it should clear out the rustc of the
+    //        corresponding stage?
+    // let out_dir = build.cargo_out(stage, &host, Mode::Librustc, target);
+    // build.clear_if_dirty(&out_dir, &libstd_shim(build, stage, &host, target));
+
+    let mut cargo = build.cargo(&compiler, Mode::Tool, host, "build");
+    cargo.arg("--manifest-path")
+         .arg(build.src.join(format!("src/tools/{}/Cargo.toml", tool)));
+    build.run(&mut cargo);
+}
diff --git a/src/bootstrap/config.rs b/src/bootstrap/config.rs
new file mode 100644 (file)
index 0000000..498196e
--- /dev/null
@@ -0,0 +1,396 @@
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Serialized configuration of a build.
+//!
+//! This module implements parsing `config.mk` and `config.toml` configuration
+//! files to tweak how the build runs.
+
+use std::collections::HashMap;
+use std::env;
+use std::fs::File;
+use std::io::prelude::*;
+use std::path::PathBuf;
+use std::process;
+
+use num_cpus;
+use rustc_serialize::Decodable;
+use toml::{Parser, Decoder, Value};
+
+/// Global configuration for the entire build and/or bootstrap.
+///
+/// This structure is derived from a combination of both `config.toml` and
+/// `config.mk`. As of the time of this writing it's unlikely that `config.toml`
+/// is used all that much, so this is primarily filled out by `config.mk` which
+/// is generated from `./configure`.
+///
+/// Note that this structure is not decoded directly into, but rather it is
+/// filled out from the decoded forms of the structs below. For documentation
+/// on each field, see the corresponding fields in
+/// `src/bootstrap/config.toml.example`.
+#[derive(Default)]
+pub struct Config {
+    pub ccache: bool,
+    pub ninja: bool,
+    pub verbose: bool,
+    pub submodules: bool,
+    pub compiler_docs: bool,
+    pub docs: bool,
+    pub target_config: HashMap<String, Target>,
+
+    // llvm codegen options
+    pub llvm_assertions: bool,
+    pub llvm_optimize: bool,
+    pub llvm_version_check: bool,
+    pub llvm_static_stdcpp: bool,
+
+    // rust codegen options
+    pub rust_optimize: bool,
+    pub rust_codegen_units: u32,
+    pub rust_debug_assertions: bool,
+    pub rust_debuginfo: bool,
+    pub rust_rpath: bool,
+    pub rustc_default_linker: Option<String>,
+    pub rustc_default_ar: Option<String>,
+    pub rust_optimize_tests: bool,
+    pub rust_debuginfo_tests: bool,
+
+    pub build: String,
+    pub host: Vec<String>,
+    pub target: Vec<String>,
+    pub rustc: Option<PathBuf>,
+    pub cargo: Option<PathBuf>,
+    pub local_rebuild: bool,
+
+    // libstd features
+    pub debug_jemalloc: bool,
+    pub use_jemalloc: bool,
+
+    // misc
+    pub channel: String,
+    pub musl_root: Option<PathBuf>,
+    pub prefix: Option<String>,
+}
+
+/// Per-target configuration stored in the global configuration structure.
+#[derive(Default)]
+pub struct Target {
+    pub llvm_config: Option<PathBuf>,
+    pub jemalloc: Option<PathBuf>,
+    pub cc: Option<PathBuf>,
+    pub cxx: Option<PathBuf>,
+    pub ndk: Option<PathBuf>,
+}
+
+/// Structure of the `config.toml` file that configuration is read from.
+///
+/// This structure uses `Decodable` to automatically decode a TOML configuration
+/// file into this format, and then this is traversed and written into the above
+/// `Config` structure.
+#[derive(RustcDecodable, Default)]
+struct TomlConfig {
+    build: Option<Build>,
+    llvm: Option<Llvm>,
+    rust: Option<Rust>,
+    target: Option<HashMap<String, TomlTarget>>,
+}
+
+/// TOML representation of various global build decisions.
+#[derive(RustcDecodable, Default, Clone)]
+struct Build {
+    build: Option<String>,
+    host: Vec<String>,
+    target: Vec<String>,
+    cargo: Option<String>,
+    rustc: Option<String>,
+    compiler_docs: Option<bool>,
+    docs: Option<bool>,
+}
+
+/// TOML representation of how the LLVM build is configured.
+#[derive(RustcDecodable, Default)]
+struct Llvm {
+    ccache: Option<bool>,
+    ninja: Option<bool>,
+    assertions: Option<bool>,
+    optimize: Option<bool>,
+    version_check: Option<bool>,
+    static_libstdcpp: Option<bool>,
+}
+
+/// TOML representation of how the Rust build is configured.
+#[derive(RustcDecodable, Default)]
+struct Rust {
+    optimize: Option<bool>,
+    codegen_units: Option<u32>,
+    debug_assertions: Option<bool>,
+    debuginfo: Option<bool>,
+    debug_jemalloc: Option<bool>,
+    use_jemalloc: Option<bool>,
+    default_linker: Option<String>,
+    default_ar: Option<String>,
+    channel: Option<String>,
+    musl_root: Option<String>,
+    rpath: Option<bool>,
+    optimize_tests: Option<bool>,
+    debuginfo_tests: Option<bool>,
+}
+
+/// TOML representation of how each build target is configured.
+#[derive(RustcDecodable, Default)]
+struct TomlTarget {
+    llvm_config: Option<String>,
+    jemalloc: Option<String>,
+    cc: Option<String>,
+    cxx: Option<String>,
+    android_ndk: Option<String>,
+}
+
+impl Config {
+    pub fn parse(build: &str, file: Option<PathBuf>) -> Config {
+        let mut config = Config::default();
+        config.llvm_optimize = true;
+        config.use_jemalloc = true;
+        config.rust_optimize = true;
+        config.rust_optimize_tests = true;
+        config.submodules = true;
+        config.docs = true;
+        config.rust_rpath = true;
+        config.rust_codegen_units = 1;
+        config.build = build.to_string();
+        config.channel = "dev".to_string();
+
+        let toml = file.map(|file| {
+            let mut f = t!(File::open(&file));
+            let mut toml = String::new();
+            t!(f.read_to_string(&mut toml));
+            let mut p = Parser::new(&toml);
+            let table = match p.parse() {
+                Some(table) => table,
+                None => {
+                    println!("failed to parse TOML configuration:");
+                    for err in p.errors.iter() {
+                        let (loline, locol) = p.to_linecol(err.lo);
+                        let (hiline, hicol) = p.to_linecol(err.hi);
+                        println!("{}:{}-{}:{}: {}", loline, locol, hiline,
+                                 hicol, err.desc);
+                    }
+                    process::exit(2);
+                }
+            };
+            let mut d = Decoder::new(Value::Table(table));
+            match Decodable::decode(&mut d) {
+                Ok(cfg) => cfg,
+                Err(e) => {
+                    println!("failed to decode TOML: {}", e);
+                    process::exit(2);
+                }
+            }
+        }).unwrap_or_else(|| TomlConfig::default());
+
+        let build = toml.build.clone().unwrap_or(Build::default());
+        set(&mut config.build, build.build.clone());
+        config.host.push(config.build.clone());
+        for host in build.host.iter() {
+            if !config.host.contains(host) {
+                config.host.push(host.clone());
+            }
+        }
+        for target in config.host.iter().chain(&build.target) {
+            if !config.target.contains(target) {
+                config.target.push(target.clone());
+            }
+        }
+        config.rustc = build.rustc.map(PathBuf::from);
+        config.cargo = build.cargo.map(PathBuf::from);
+        set(&mut config.compiler_docs, build.compiler_docs);
+        set(&mut config.docs, build.docs);
+
+        if let Some(ref llvm) = toml.llvm {
+            set(&mut config.ccache, llvm.ccache);
+            set(&mut config.ninja, llvm.ninja);
+            set(&mut config.llvm_assertions, llvm.assertions);
+            set(&mut config.llvm_optimize, llvm.optimize);
+            set(&mut config.llvm_version_check, llvm.version_check);
+            set(&mut config.llvm_static_stdcpp, llvm.static_libstdcpp);
+        }
+        if let Some(ref rust) = toml.rust {
+            set(&mut config.rust_debug_assertions, rust.debug_assertions);
+            set(&mut config.rust_debuginfo, rust.debuginfo);
+            set(&mut config.rust_optimize, rust.optimize);
+            set(&mut config.rust_optimize_tests, rust.optimize_tests);
+            set(&mut config.rust_debuginfo_tests, rust.debuginfo_tests);
+            set(&mut config.rust_rpath, rust.rpath);
+            set(&mut config.debug_jemalloc, rust.debug_jemalloc);
+            set(&mut config.use_jemalloc, rust.use_jemalloc);
+            set(&mut config.channel, rust.channel.clone());
+            config.rustc_default_linker = rust.default_linker.clone();
+            config.rustc_default_ar = rust.default_ar.clone();
+            config.musl_root = rust.musl_root.clone().map(PathBuf::from);
+
+            match rust.codegen_units {
+                Some(0) => config.rust_codegen_units = num_cpus::get() as u32,
+                Some(n) => config.rust_codegen_units = n,
+                None => {}
+            }
+        }
+
+        if let Some(ref t) = toml.target {
+            for (triple, cfg) in t {
+                let mut target = Target::default();
+
+                if let Some(ref s) = cfg.llvm_config {
+                    target.llvm_config = Some(env::current_dir().unwrap().join(s));
+                }
+                if let Some(ref s) = cfg.jemalloc {
+                    target.jemalloc = Some(env::current_dir().unwrap().join(s));
+                }
+                if let Some(ref s) = cfg.android_ndk {
+                    target.ndk = Some(env::current_dir().unwrap().join(s));
+                }
+                target.cxx = cfg.cxx.clone().map(PathBuf::from);
+                target.cc = cfg.cc.clone().map(PathBuf::from);
+
+                config.target_config.insert(triple.clone(), target);
+            }
+        }
+
+        return config
+    }
+
+    /// "Temporary" routine to parse `config.mk` into this configuration.
+    ///
+    /// While we still have `./configure` this implements the ability to decode
+    /// that configuration into this. This isn't exactly a full-blown makefile
+    /// parser, but hey it gets the job done!
+    pub fn update_with_config_mk(&mut self) {
+        let mut config = String::new();
+        File::open("config.mk").unwrap().read_to_string(&mut config).unwrap();
+        for line in config.lines() {
+            let mut parts = line.splitn(2, ":=").map(|s| s.trim());
+            let key = parts.next().unwrap();
+            let value = match parts.next() {
+                Some(n) if n.starts_with('\"') => &n[1..n.len() - 1],
+                Some(n) => n,
+                None => continue
+            };
+
+            macro_rules! check {
+                ($(($name:expr, $val:expr),)*) => {
+                    if value == "1" {
+                        $(
+                            if key == concat!("CFG_ENABLE_", $name) {
+                                $val = true;
+                                continue
+                            }
+                            if key == concat!("CFG_DISABLE_", $name) {
+                                $val = false;
+                                continue
+                            }
+                        )*
+                    }
+                }
+            }
+
+            check! {
+                ("CCACHE", self.ccache),
+                ("MANAGE_SUBMODULES", self.submodules),
+                ("COMPILER_DOCS", self.compiler_docs),
+                ("DOCS", self.docs),
+                ("LLVM_ASSERTIONS", self.llvm_assertions),
+                ("OPTIMIZE_LLVM", self.llvm_optimize),
+                ("LLVM_VERSION_CHECK", self.llvm_version_check),
+                ("LLVM_STATIC_STDCPP", self.llvm_static_stdcpp),
+                ("OPTIMIZE", self.rust_optimize),
+                ("DEBUG_ASSERTIONS", self.rust_debug_assertions),
+                ("DEBUGINFO", self.rust_debuginfo),
+                ("JEMALLOC", self.use_jemalloc),
+                ("DEBUG_JEMALLOC", self.debug_jemalloc),
+                ("RPATH", self.rust_rpath),
+                ("OPTIMIZE_TESTS", self.rust_optimize_tests),
+                ("DEBUGINFO_TESTS", self.rust_debuginfo_tests),
+                ("LOCAL_REBUILD", self.local_rebuild),
+            }
+
+            match key {
+                "CFG_BUILD" => self.build = value.to_string(),
+                "CFG_HOST" => {
+                    self.host = value.split(" ").map(|s| s.to_string())
+                                     .collect();
+                }
+                "CFG_TARGET" => {
+                    self.target = value.split(" ").map(|s| s.to_string())
+                                       .collect();
+                }
+                "CFG_MUSL_ROOT" if value.len() > 0 => {
+                    self.musl_root = Some(PathBuf::from(value));
+                }
+                "CFG_DEFAULT_AR" if value.len() > 0 => {
+                    self.rustc_default_ar = Some(value.to_string());
+                }
+                "CFG_DEFAULT_LINKER" if value.len() > 0 => {
+                    self.rustc_default_linker = Some(value.to_string());
+                }
+                "CFG_RELEASE_CHANNEL" => {
+                    self.channel = value.to_string();
+                }
+                "CFG_PREFIX" => {
+                    self.prefix = Some(value.to_string());
+                }
+                "CFG_LLVM_ROOT" if value.len() > 0 => {
+                    let target = self.target_config.entry(self.build.clone())
+                                     .or_insert(Target::default());
+                    let root = PathBuf::from(value);
+                    target.llvm_config = Some(root.join("bin/llvm-config"));
+                }
+                "CFG_JEMALLOC_ROOT" if value.len() > 0 => {
+                    let target = self.target_config.entry(self.build.clone())
+                                     .or_insert(Target::default());
+                    target.jemalloc = Some(PathBuf::from(value));
+                }
+                "CFG_ARM_LINUX_ANDROIDEABI_NDK" if value.len() > 0 => {
+                    let target = "arm-linux-androideabi".to_string();
+                    let target = self.target_config.entry(target)
+                                     .or_insert(Target::default());
+                    target.ndk = Some(PathBuf::from(value));
+                }
+                "CFG_ARMV7_LINUX_ANDROIDEABI_NDK" if value.len() > 0 => {
+                    let target = "armv7-linux-androideabi".to_string();
+                    let target = self.target_config.entry(target)
+                                     .or_insert(Target::default());
+                    target.ndk = Some(PathBuf::from(value));
+                }
+                "CFG_I686_LINUX_ANDROID_NDK" if value.len() > 0 => {
+                    let target = "i686-linux-android".to_string();
+                    let target = self.target_config.entry(target)
+                                     .or_insert(Target::default());
+                    target.ndk = Some(PathBuf::from(value));
+                }
+                "CFG_AARCH64_LINUX_ANDROID_NDK" if value.len() > 0 => {
+                    let target = "aarch64-linux-android".to_string();
+                    let target = self.target_config.entry(target)
+                                     .or_insert(Target::default());
+                    target.ndk = Some(PathBuf::from(value));
+                }
+                "CFG_LOCAL_RUST_ROOT" if value.len() > 0 => {
+                    self.rustc = Some(PathBuf::from(value).join("bin/rustc"));
+                    self.cargo = Some(PathBuf::from(value).join("bin/cargo"));
+                }
+                _ => {}
+            }
+        }
+    }
+}
+
+fn set<T>(field: &mut T, val: Option<T>) {
+    if let Some(v) = val {
+        *field = v;
+    }
+}
diff --git a/src/bootstrap/dist.rs b/src/bootstrap/dist.rs
new file mode 100644 (file)
index 0000000..1cf71c3
--- /dev/null
@@ -0,0 +1,319 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Implementation of the various distribution aspects of the compiler.
+//!
+//! This module is responsible for creating tarballs of the standard library,
+//! compiler, and documentation. This ends up being what we distribute to
+//! everyone as well.
+//!
+//! No tarball is actually created literally in this file, but rather we shell
+//! out to `rust-installer` still. This may one day be replaced with bits and
+//! pieces of `rustup.rs`!
+
+use std::fs::{self, File};
+use std::io::Write;
+use std::path::{PathBuf, Path};
+use std::process::Command;
+
+use {Build, Compiler};
+use util::{cp_r, libdir, is_dylib};
+
+/// Version string used in package (tarball) names: the literal channel
+/// name for "beta"/"nightly", otherwise the full release number (this
+/// covers "stable" as well as any unknown channel).
+fn package_vers(build: &Build) -> &str {
+    match &build.config.channel[..] {
+        "stable" => &build.release,
+        "beta" => "beta",
+        "nightly" => "nightly",
+        _ => &build.release,
+    }
+}
+
+/// Directory under the build output where finished tarballs are placed.
+fn distdir(build: &Build) -> PathBuf {
+    build.out.join("dist")
+}
+
+/// Scratch directory used while assembling installer images.
+fn tmpdir(build: &Build) -> PathBuf {
+    build.out.join("tmp/dist")
+}
+
+/// Builds the `rust-docs` installer component.
+///
+/// Slurps up documentation from the `stage`'s `host`.
+pub fn docs(build: &Build, stage: u32, host: &str) {
+    println!("Dist docs stage{} ({})", stage, host);
+    let name = format!("rust-docs-{}", package_vers(build));
+    // NOTE(review): the image directory interpolates `name` twice, unlike
+    // every sibling component which uses `name`/`host` -- confirm this is
+    // intentional (images for different hosts would share one directory).
+    let image = tmpdir(build).join(format!("{}-{}-image", name, name));
+    let _ = fs::remove_dir_all(&image);
+
+    // Stage the rendered HTML docs into the installer image layout.
+    let dst = image.join("share/doc/rust/html");
+    t!(fs::create_dir_all(&dst));
+    let src = build.out.join(host).join("doc");
+    cp_r(&src, &dst);
+
+    // Wrap the image into a tarball via the rust-installer shell script.
+    let mut cmd = Command::new("sh");
+    cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
+       .arg("--product-name=Rust-Documentation")
+       .arg("--rel-manifest-dir=rustlib")
+       .arg("--success-message=Rust-documentation-is-installed.")
+       .arg(format!("--image-dir={}", sanitize_sh(&image)))
+       .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
+       .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
+       .arg(format!("--package-name={}-{}", name, host))
+       .arg("--component-name=rust-docs")
+       .arg("--legacy-manifest-dirs=rustlib,cargo")
+       .arg("--bulk-dirs=share/doc/rust/html");
+    build.run(&mut cmd);
+    t!(fs::remove_dir_all(&image));
+
+    // As part of this step, *also* copy the docs directory to a directory which
+    // buildbot typically uploads.
+    if host == build.config.build {
+        let dst = distdir(build).join("doc").join(&build.package_vers);
+        t!(fs::create_dir_all(&dst));
+        cp_r(&src, &dst);
+    }
+}
+
+/// Build the `rust-mingw` installer component.
+///
+/// This contains all the bits and pieces to run the MinGW Windows targets
+/// without any extra installed software (e.g. we bundle gcc, libraries, etc).
+/// Currently just shells out to a python script, but that should be rewritten
+/// in Rust.
+pub fn mingw(build: &Build, host: &str) {
+    println!("Dist mingw ({})", host);
+    let name = format!("rust-mingw-{}", package_vers(build));
+    let image = tmpdir(build).join(format!("{}-{}-image", name, host));
+    // Start from a clean image directory; a failure here just means it
+    // didn't exist yet.
+    let _ = fs::remove_dir_all(&image);
+
+    // The first argument to the script is a "temporary directory" which is just
+    // thrown away (this contains the runtime DLLs included in the rustc package
+    // above) and the second argument is where to place all the MinGW components
+    // (which is what we want).
+    //
+    // FIXME: this script should be rewritten into Rust
+    let mut cmd = Command::new("python");
+    cmd.arg(build.src.join("src/etc/make-win-dist.py"))
+       .arg(tmpdir(build))
+       .arg(&image)
+       .arg(host);
+    build.run(&mut cmd);
+
+    // Package the staged image into a tarball with rust-installer.
+    let mut cmd = Command::new("sh");
+    cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
+       .arg("--product-name=Rust-MinGW")
+       .arg("--rel-manifest-dir=rustlib")
+       .arg("--success-message=Rust-MinGW-is-installed.")
+       .arg(format!("--image-dir={}", sanitize_sh(&image)))
+       .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
+       .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
+       .arg(format!("--package-name={}-{}", name, host))
+       .arg("--component-name=rust-mingw")
+       .arg("--legacy-manifest-dirs=rustlib,cargo");
+    build.run(&mut cmd);
+    t!(fs::remove_dir_all(&image));
+}
+
+/// Creates the `rustc` installer component.
+///
+/// Builds two trees under the dist temp dir: the "image" (files that get
+/// installed) and the "overlay" (files shipped in the tarball but not
+/// installed, e.g. licenses and a `version` stamp), then hands both to
+/// the rust-installer script.
+pub fn rustc(build: &Build, stage: u32, host: &str) {
+    println!("Dist rustc stage{} ({})", stage, host);
+    let name = format!("rustc-{}", package_vers(build));
+    let image = tmpdir(build).join(format!("{}-{}-image", name, host));
+    let _ = fs::remove_dir_all(&image);
+    let overlay = tmpdir(build).join(format!("{}-{}-overlay", name, host));
+    let _ = fs::remove_dir_all(&overlay);
+
+    // Prepare the rustc "image", what will actually end up getting installed
+    prepare_image(build, stage, host, &image);
+
+    // Prepare the overlay which is part of the tarball but won't actually be
+    // installed
+    let cp = |file: &str| {
+        install(&build.src.join(file), &overlay, 0o644);
+    };
+    cp("COPYRIGHT");
+    cp("LICENSE-APACHE");
+    cp("LICENSE-MIT");
+    cp("README.md");
+    // tiny morsel of metadata is used by rust-packaging
+    let version = &build.version;
+    t!(t!(File::create(overlay.join("version"))).write_all(version.as_bytes()));
+
+    // On MinGW we've got a few runtime DLL dependencies that we need to
+    // include. The first argument to this script is where to put these DLLs
+    // (the image we're creating), and the second argument is a junk directory
+    // to ignore all other MinGW stuff the script creates.
+    //
+    // On 32-bit MinGW we're always including a DLL which needs some extra
+    // licenses to distribute. On 64-bit MinGW we don't actually distribute
+    // anything requiring us to distribute a license, but it's likely the
+    // install will *also* include the rust-mingw package, which also needs
+    // licenses, so to be safe we just include it here in all MinGW packages.
+    //
+    // FIXME: this script should be rewritten into Rust
+    if host.contains("pc-windows-gnu") {
+        let mut cmd = Command::new("python");
+        cmd.arg(build.src.join("src/etc/make-win-dist.py"))
+           .arg(&image)
+           .arg(tmpdir(build))
+           .arg(host);
+        build.run(&mut cmd);
+
+        let dst = image.join("share/doc");
+        t!(fs::create_dir_all(&dst));
+        cp_r(&build.src.join("src/etc/third-party"), &dst);
+    }
+
+    // Finally, wrap everything up in a nice tarball!
+    let mut cmd = Command::new("sh");
+    cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
+       .arg("--product-name=Rust")
+       .arg("--rel-manifest-dir=rustlib")
+       .arg("--success-message=Rust-is-ready-to-roll.")
+       .arg(format!("--image-dir={}", sanitize_sh(&image)))
+       .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
+       .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
+       .arg(format!("--non-installed-overlay={}", sanitize_sh(&overlay)))
+       .arg(format!("--package-name={}-{}", name, host))
+       .arg("--component-name=rustc")
+       .arg("--legacy-manifest-dirs=rustlib,cargo");
+    build.run(&mut cmd);
+    t!(fs::remove_dir_all(&image));
+    t!(fs::remove_dir_all(&overlay));
+
+    // Populates `image` with everything from the stage's sysroot that the
+    // installer should lay down: compiler binaries, runtime dylibs, man
+    // pages, debugger scripts, and license files.
+    fn prepare_image(build: &Build, stage: u32, host: &str, image: &Path) {
+        let src = build.sysroot(&Compiler::new(stage, host));
+        let libdir = libdir(host);
+
+        // Copy rustc/rustdoc binaries
+        t!(fs::create_dir_all(image.join("bin")));
+        cp_r(&src.join("bin"), &image.join("bin"));
+
+        // Copy runtime DLLs needed by the compiler
+        if libdir != "bin" {
+            for entry in t!(src.join(libdir).read_dir()).map(|e| t!(e)) {
+                let name = entry.file_name();
+                if let Some(s) = name.to_str() {
+                    if is_dylib(s) {
+                        install(&entry.path(), &image.join(libdir), 0o644);
+                    }
+                }
+            }
+        }
+
+        // Man pages
+        t!(fs::create_dir_all(image.join("share/man/man1")));
+        cp_r(&build.src.join("man"), &image.join("share/man/man1"));
+
+        // Debugger scripts
+        debugger_scripts(build, &image, host);
+
+        // Misc license info
+        let cp = |file: &str| {
+            install(&build.src.join(file), &image.join("share/doc/rust"), 0o644);
+        };
+        cp("COPYRIGHT");
+        cp("LICENSE-APACHE");
+        cp("LICENSE-MIT");
+        cp("README.md");
+    }
+}
+
+/// Copies debugger scripts for `host` into the `sysroot` specified.
+///
+/// Python pretty-printer helpers land in `lib/rustlib/etc` (mode 0644);
+/// the `rust-gdb`/`rust-lldb` wrapper scripts land in `bin` with the
+/// executable bit set. MSVC hosts get nothing.
+pub fn debugger_scripts(build: &Build,
+                        sysroot: &Path,
+                        host: &str) {
+    let cp_debugger_script = |file: &str| {
+        let dst = sysroot.join("lib/rustlib/etc");
+        t!(fs::create_dir_all(&dst));
+        install(&build.src.join("src/etc/").join(file), &dst, 0o644);
+    };
+    if host.contains("windows-msvc") {
+        // no debugger scripts
+    } else {
+        cp_debugger_script("debugger_pretty_printers_common.py");
+
+        // gdb debugger scripts
+        install(&build.src.join("src/etc/rust-gdb"), &sysroot.join("bin"),
+                0o755);
+
+        cp_debugger_script("gdb_load_rust_pretty_printers.py");
+        cp_debugger_script("gdb_rust_pretty_printing.py");
+
+        // lldb debugger scripts
+        install(&build.src.join("src/etc/rust-lldb"), &sysroot.join("bin"),
+                0o755);
+
+        cp_debugger_script("lldb_rust_formatters.py");
+    }
+}
+
+/// Creates the `rust-std` installer component as compiled by `compiler` for the
+/// target `target`.
+pub fn std(build: &Build, compiler: &Compiler, target: &str) {
+    println!("Dist std stage{} ({} -> {})", compiler.stage, compiler.host,
+             target);
+    let name = format!("rust-std-{}", package_vers(build));
+    let image = tmpdir(build).join(format!("{}-{}-image", name, target));
+    let _ = fs::remove_dir_all(&image);
+
+    // Stage just this target's rustlib directory into the image.
+    let dst = image.join("lib/rustlib").join(target);
+    t!(fs::create_dir_all(&dst));
+    let src = build.sysroot(compiler).join("lib/rustlib");
+    cp_r(&src.join(target), &dst);
+
+    let mut cmd = Command::new("sh");
+    cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
+       .arg("--product-name=Rust")
+       .arg("--rel-manifest-dir=rustlib")
+       .arg("--success-message=std-is-standing-at-the-ready.")
+       .arg(format!("--image-dir={}", sanitize_sh(&image)))
+       .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
+       .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
+       .arg(format!("--package-name={}-{}", name, target))
+       // Unlike the other components, the component name embeds the target.
+       .arg(format!("--component-name=rust-std-{}", target))
+       .arg("--legacy-manifest-dirs=rustlib,cargo");
+    build.run(&mut cmd);
+    t!(fs::remove_dir_all(&image));
+}
+
+/// Copies `src` into `dstdir` (creating it if needed), keeping the file
+/// name, then applies `perms` (a no-op on Windows; see `chmod` below).
+fn install(src: &Path, dstdir: &Path, perms: u32) {
+    let dst = dstdir.join(src.file_name().unwrap());
+    t!(fs::create_dir_all(dstdir));
+    t!(fs::copy(src, &dst));
+    chmod(&dst, perms);
+}
+
+/// Sets unix permission bits on `path`; errors are fatal via `t!`.
+#[cfg(unix)]
+fn chmod(path: &Path, perms: u32) {
+    use std::os::unix::fs::*;
+    t!(fs::set_permissions(path, fs::Permissions::from_mode(perms)));
+}
+/// Windows has no unix-style permission bits, so this is a no-op.
+#[cfg(windows)]
+fn chmod(_path: &Path, _perms: u32) {}
+
+// We have to run a few shell scripts, which choke quite a bit on both `\`
+// characters and on `C:\` paths, so normalize both of them away.
+fn sanitize_sh(path: &Path) -> String {
+    let path = path.to_str().unwrap().replace("\\", "/");
+    return change_drive(&path).unwrap_or(path);
+
+    // Rewrites a `C:/...` drive prefix into MSYS-style `/C/...`. Returns
+    // None when the path has no drive prefix, in which case the caller
+    // falls back to the slash-normalized path above.
+    fn change_drive(s: &str) -> Option<String> {
+        let mut ch = s.chars();
+        let drive = ch.next().unwrap_or('C');
+        if ch.next() != Some(':') {
+            return None
+        }
+        if ch.next() != Some('/') {
+            return None
+        }
+        Some(format!("/{}/{}", drive, &s[drive.len_utf8() + 2..]))
+    }
+}
diff --git a/src/bootstrap/doc.rs b/src/bootstrap/doc.rs
new file mode 100644 (file)
index 0000000..ac90ab5
--- /dev/null
@@ -0,0 +1,207 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Documentation generation for rustbuild.
+//!
+//! This module implements generation for all bits and pieces of documentation
+//! for the Rust project. This notably includes suites like the rust book, the
+//! nomicon, standalone documentation, etc.
+//!
+//! Everything here is basically just a shim around calling either `rustbook` or
+//! `rustdoc`.
+
+use std::fs::{self, File};
+use std::io::prelude::*;
+use std::path::Path;
+use std::process::Command;
+
+use {Build, Compiler, Mode};
+use util::{up_to_date, cp_r};
+
+/// Invoke `rustbook` as compiled in `stage` for `target` for the doc book
+/// `name` into the `out` path.
+///
+/// This will not actually generate any documentation if the documentation has
+/// already been generated.
+pub fn rustbook(build: &Build, stage: u32, target: &str, name: &str, out: &Path) {
+    t!(fs::create_dir_all(out));
+
+    let out = out.join(name);
+    let compiler = Compiler::new(stage, &build.config.build);
+    let src = build.src.join("src/doc").join(name);
+    let index = out.join("index.html");
+    let rustbook = build.tool(&compiler, "rustbook");
+    // Skip the rebuild when both the book sources and the rustbook binary
+    // are older than the generated index.
+    if up_to_date(&src, &index) && up_to_date(&rustbook, &index) {
+        return
+    }
+    println!("Rustbook stage{} ({}) - {}", stage, target, name);
+    let _ = fs::remove_dir_all(&out);
+    build.run(build.tool_cmd(&compiler, "rustbook")
+                   .arg("build")
+                   .arg(&src)
+                   .arg(out));
+}
+
+/// Generates all standalone documentation as compiled by the rustdoc in `stage`
+/// for the `target` into `out`.
+///
+/// This will list all of `src/doc` looking for markdown files and appropriately
+/// perform transformations like substituting `VERSION`, `SHORT_HASH`, and
+/// `STAMP` along with providing the various header/footer HTML we've customized.
+///
+/// In the end, this is just a glorified wrapper around rustdoc!
+pub fn standalone(build: &Build, stage: u32, target: &str, out: &Path) {
+    println!("Documenting stage{} standalone ({})", stage, target);
+    t!(fs::create_dir_all(out));
+
+    let compiler = Compiler::new(stage, &build.config.build);
+
+    let favicon = build.src.join("src/doc/favicon.inc");
+    let footer = build.src.join("src/doc/footer.inc");
+    let full_toc = build.src.join("src/doc/full-toc.inc");
+    t!(fs::copy(build.src.join("src/doc/rust.css"), out.join("rust.css")));
+
+    let version_input = build.src.join("src/doc/version_info.html.template");
+    let version_info = out.join("version_info.html");
+
+    // Regenerate version_info.html from its template only when the template
+    // is newer than the generated file.
+    if !up_to_date(&version_input, &version_info) {
+        let mut info = String::new();
+        t!(t!(File::open(&version_input)).read_to_string(&mut info));
+        let blank = String::new();
+        let short = build.short_ver_hash.as_ref().unwrap_or(&blank);
+        let hash = build.ver_hash.as_ref().unwrap_or(&blank);
+        let info = info.replace("VERSION", &build.release)
+                       .replace("SHORT_HASH", short)
+                       .replace("STAMP", hash);
+        t!(t!(File::create(&version_info)).write_all(info.as_bytes()));
+    }
+
+    for file in t!(fs::read_dir(build.src.join("src/doc"))) {
+        let file = t!(file);
+        let path = file.path();
+        let filename = path.file_name().unwrap().to_str().unwrap();
+        if !filename.ends_with(".md") || filename == "README.md" {
+            continue
+        }
+
+        // Skip documents whose rendered HTML is newer than every input that
+        // feeds into it (source, includes, version info, and rustdoc itself).
+        let html = out.join(filename).with_extension("html");
+        let rustdoc = build.rustdoc(&compiler);
+        if up_to_date(&path, &html) &&
+           up_to_date(&footer, &html) &&
+           up_to_date(&favicon, &html) &&
+           up_to_date(&full_toc, &html) &&
+           up_to_date(&version_info, &html) &&
+           up_to_date(&rustdoc, &html) {
+            continue
+        }
+
+        let mut cmd = Command::new(&rustdoc);
+        build.add_rustc_lib_path(&compiler, &mut cmd);
+        cmd.arg("--html-after-content").arg(&footer)
+           .arg("--html-before-content").arg(&version_info)
+           .arg("--html-in-header").arg(&favicon)
+           .arg("--markdown-playground-url")
+           .arg("https://play.rust-lang.org/")
+           .arg("-o").arg(out)
+           .arg(&path);
+
+        // The reference additionally gets the full table of contents injected.
+        if filename == "reference.md" {
+           cmd.arg("--html-in-header").arg(&full_toc);
+        }
+
+        if filename == "not_found.md" {
+            // Uses an absolute stylesheet URL -- presumably because the 404
+            // page is served from arbitrary paths where a relative rust.css
+            // would not resolve. TODO confirm.
+            cmd.arg("--markdown-no-toc")
+               .arg("--markdown-css")
+               .arg("https://doc.rust-lang.org/rust.css");
+        } else {
+            cmd.arg("--markdown-css").arg("rust.css");
+        }
+        build.run(&mut cmd);
+    }
+}
+
+/// Compile all standard library documentation.
+///
+/// This will generate all documentation for the standard library and its
+/// dependencies. This is largely just a wrapper around `cargo doc`.
+pub fn std(build: &Build, stage: u32, target: &str, out: &Path) {
+    println!("Documenting stage{} std ({})", stage, target);
+    t!(fs::create_dir_all(out));
+    let compiler = Compiler::new(stage, &build.config.build);
+    let out_dir = build.stage_out(&compiler, Mode::Libstd)
+                       .join(target).join("doc");
+    let rustdoc = build.rustdoc(&compiler);
+
+    // NOTE(review): clear_if_dirty presumably invalidates out_dir when the
+    // rustdoc binary is newer than its contents -- confirm in lib.rs.
+    build.clear_if_dirty(&out_dir, &rustdoc);
+
+    let mut cargo = build.cargo(&compiler, Mode::Libstd, target, "doc");
+    cargo.arg("--manifest-path")
+         .arg(build.src.join("src/rustc/std_shim/Cargo.toml"))
+         .arg("--features").arg(build.std_features());
+    build.run(&mut cargo);
+    cp_r(&out_dir, out)
+}
+
+/// Compile all libtest documentation.
+///
+/// This will generate all documentation for libtest and its dependencies. This
+/// is largely just a wrapper around `cargo doc`.
+pub fn test(build: &Build, stage: u32, target: &str, out: &Path) {
+    println!("Documenting stage{} test ({})", stage, target);
+    // NOTE(review): unlike `std` above, this never calls create_dir_all(out)
+    // first -- confirm cp_r below tolerates a missing destination directory.
+    let compiler = Compiler::new(stage, &build.config.build);
+    let out_dir = build.stage_out(&compiler, Mode::Libtest)
+                       .join(target).join("doc");
+    let rustdoc = build.rustdoc(&compiler);
+
+    build.clear_if_dirty(&out_dir, &rustdoc);
+
+    let mut cargo = build.cargo(&compiler, Mode::Libtest, target, "doc");
+    cargo.arg("--manifest-path")
+         .arg(build.src.join("src/rustc/test_shim/Cargo.toml"));
+    build.run(&mut cargo);
+    cp_r(&out_dir, out)
+}
+
+/// Generate all compiler documentation.
+///
+/// This will generate all documentation for the compiler libraries and their
+/// dependencies. This is largely just a wrapper around `cargo doc`.
+pub fn rustc(build: &Build, stage: u32, target: &str, out: &Path) {
+    println!("Documenting stage{} compiler ({})", stage, target);
+    let compiler = Compiler::new(stage, &build.config.build);
+    let out_dir = build.stage_out(&compiler, Mode::Librustc)
+                       .join(target).join("doc");
+    let rustdoc = build.rustdoc(&compiler);
+    // NOTE(review): t! aborts on error and remove_dir_all errors when the
+    // path does not exist -- confirm this cannot fire on a pristine build
+    // where out_dir has never been created.
+    if !up_to_date(&rustdoc, &out_dir.join("rustc/index.html")) {
+        t!(fs::remove_dir_all(&out_dir));
+    }
+    let mut cargo = build.cargo(&compiler, Mode::Librustc, target, "doc");
+    cargo.arg("--manifest-path")
+         .arg(build.src.join("src/rustc/Cargo.toml"))
+         .arg("--features").arg(build.rustc_features());
+    build.run(&mut cargo);
+    cp_r(&out_dir, out)
+}
+
+/// Generates the HTML rendered error-index by running the
+/// `error_index_generator` tool.
+pub fn error_index(build: &Build, stage: u32, target: &str, out: &Path) {
+    println!("Documenting stage{} error index ({})", stage, target);
+    t!(fs::create_dir_all(out));
+    let compiler = Compiler::new(stage, &build.config.build);
+    let mut index = build.tool_cmd(&compiler, "error_index_generator");
+    // Output format and destination file are positional arguments.
+    index.arg("html");
+    index.arg(out.join("error-index.html"));
+
+    // FIXME: shouldn't have to pass this env var
+    index.env("CFG_BUILD", &build.config.build);
+
+    build.run(&mut index);
+}
diff --git a/src/bootstrap/flags.rs b/src/bootstrap/flags.rs
new file mode 100644 (file)
index 0000000..d925997
--- /dev/null
@@ -0,0 +1,103 @@
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Command-line interface of the rustbuild build system.
+//!
+//! This module implements the command-line parsing of the build system which
+//! has various flags to configure how it's run.
+
+use std::fs;
+use std::path::PathBuf;
+use std::process;
+use std::slice;
+
+use getopts::Options;
+
+/// Deserialized version of all flags for this compile.
+pub struct Flags {
+    pub verbose: bool,           // -v / --verbose
+    pub stage: Option<u32>,      // --stage N
+    pub build: String,           // --build (required)
+    pub host: Filter,            // --host, repeatable
+    pub target: Filter,          // --target, repeatable
+    pub step: Vec<String>,       // -s / --step, repeatable
+    pub config: Option<PathBuf>, // --config FILE (or ./config.toml if present)
+    pub src: Option<PathBuf>,    // --src DIR
+    pub jobs: Option<u32>,       // -j / --jobs
+    pub args: Vec<String>,       // free (non-option) arguments
+    pub clean: bool,             // --clean
+}
+
+/// A set of requested names (hosts/targets/steps); an empty set means
+/// "no restriction" -- see `Filter::contains`.
+pub struct Filter {
+    values: Vec<String>,
+}
+
+impl Flags {
+    /// Parses `args` into a `Flags`, printing usage and exiting on `-h` or
+    /// on an option-parse error. `--build` is required (`reqopt`); note a
+    /// non-numeric `--stage`/`--jobs` value panics on the `unwrap` below.
+    pub fn parse(args: &[String]) -> Flags {
+        let mut opts = Options::new();
+        opts.optflag("v", "verbose", "use verbose output");
+        opts.optopt("", "config", "TOML configuration file for build", "FILE");
+        opts.optmulti("", "host", "host targets to build", "HOST");
+        opts.reqopt("", "build", "build target of the stage0 compiler", "BUILD");
+        opts.optmulti("", "target", "targets to build", "TARGET");
+        opts.optmulti("s", "step", "build step to execute", "STEP");
+        opts.optopt("", "stage", "stage to build", "N");
+        opts.optopt("", "src", "path to repo root", "DIR");
+        opts.optopt("j", "jobs", "number of jobs to run in parallel", "JOBS");
+        opts.optflag("", "clean", "clean output directory");
+        opts.optflag("h", "help", "print this help message");
+
+        // Print the generated usage text and terminate with exit code `n`.
+        let usage = |n| -> ! {
+            let brief = format!("Usage: rust.py [options]");
+            print!("{}", opts.usage(&brief));
+            process::exit(n);
+        };
+
+        let m = opts.parse(args).unwrap_or_else(|e| {
+            println!("failed to parse options: {}", e);
+            usage(1);
+        });
+        if m.opt_present("h") {
+            usage(0);
+        }
+
+        // An explicit --config wins; otherwise fall back to ./config.toml
+        // when it exists in the current directory.
+        let cfg_file = m.opt_str("config").map(PathBuf::from).or_else(|| {
+            if fs::metadata("config.toml").is_ok() {
+                Some(PathBuf::from("config.toml"))
+            } else {
+                None
+            }
+        });
+
+        Flags {
+            verbose: m.opt_present("v"),
+            clean: m.opt_present("clean"),
+            stage: m.opt_str("stage").map(|j| j.parse().unwrap()),
+            build: m.opt_str("build").unwrap(),
+            host: Filter { values: m.opt_strs("host") },
+            target: Filter { values: m.opt_strs("target") },
+            step: m.opt_strs("step"),
+            config: cfg_file,
+            src: m.opt_str("src").map(PathBuf::from),
+            jobs: m.opt_str("jobs").map(|j| j.parse().unwrap()),
+            args: m.free.clone(),
+        }
+    }
+}
+
+impl Filter {
+    /// Returns true when `name` was explicitly requested, or when the
+    /// filter is empty (an empty filter imposes no restriction).
+    pub fn contains(&self, name: &str) -> bool {
+        self.values.len() == 0 || self.values.iter().any(|s| s == name)
+    }
+
+    /// Iterates over the explicitly requested values (may be empty).
+    pub fn iter(&self) -> slice::Iter<String> {
+        self.values.iter()
+    }
+}
diff --git a/src/bootstrap/job.rs b/src/bootstrap/job.rs
new file mode 100644 (file)
index 0000000..4558e6f
--- /dev/null
@@ -0,0 +1,111 @@
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Job management on Windows for bootstrapping
+//!
+//! Most of the time when you're running a build system (e.g. make) you expect
+//! Ctrl-C or abnormal termination to actually terminate the entire tree of
+//! processes in play, not just the one at the top. This currently works "by
+//! default" on Unix platforms because Ctrl-C actually sends a signal to the
+//! *process group* rather than the parent process, so everything will get torn
+//! down. On Windows, however, this does not happen and Ctrl-C just kills the
+//! parent process.
+//!
+//! To achieve the same semantics on Windows we use Job Objects to ensure that
+//! all processes die at the same time. Job objects have a mode of operation
+//! where when all handles to the object are closed it causes all child
+//! processes associated with the object to be terminated immediately.
+//! Conveniently whenever a process in the job object spawns a new process the
+//! child will be associated with the job object as well. This means if we add
+//! ourselves to the job object we create then everything will get torn down!
+//!
+//! Unfortunately most of the time the build system is actually called from a
+//! python wrapper (which manages things like building the build system) so this
+//! all doesn't quite cut it so far. To go the last mile we duplicate the job
+//! object handle into our parent process (a python process probably) and then
+//! close our own handle. This means that the only handle to the job object
+//! resides in the parent python process, so when python dies the whole build
+//! system dies (as one would probably expect!).
+//!
+//! Note that this module has a #[cfg(windows)] above it as none of this logic
+//! is required on Unix.
+
+extern crate kernel32;
+extern crate winapi;
+
+use std::env;
+use std::io;
+use std::mem;
+
+use self::winapi::*;
+use self::kernel32::*;
+
+/// Creates a job object configured to kill every process in it when the
+/// last handle to the object is closed, then assigns the current process
+/// (and hence all future children) to that job.
+///
+/// If the `BOOTSTRAP_PARENT_ID` environment variable is set, the only
+/// handle to the job is duplicated into that parent process, so the whole
+/// build tree is torn down when the parent dies.
+///
+/// # Safety
+///
+/// Calls raw Win32 APIs directly. NOTE(review): intended to be called once
+/// early during startup — confirm callers don't invoke it repeatedly.
+pub unsafe fn setup() {
+    // Create a new job object for us to use
+    let job = CreateJobObjectW(0 as *mut _, 0 as *const _);
+    assert!(job != 0 as *mut _, "{}", io::Error::last_os_error());
+
+    // Indicate that when all handles to the job object are gone that all
+    // process in the object should be killed. Note that this includes our
+    // entire process tree by default because we've added ourselves and our
+    // children will reside in the job by default.
+    let mut info = mem::zeroed::<JOBOBJECT_EXTENDED_LIMIT_INFORMATION>();
+    info.BasicLimitInformation.LimitFlags = JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE;
+    let r = SetInformationJobObject(job,
+                                    JobObjectExtendedLimitInformation,
+                                    &mut info as *mut _ as LPVOID,
+                                    mem::size_of_val(&info) as DWORD);
+    assert!(r != 0, "{}", io::Error::last_os_error());
+
+    // Assign our process to this job object. Note that if this fails, one very
+    // likely reason is that we are ourselves already in a job object! This can
+    // happen on the build bots that we've got for Windows, or if just anyone
+    // else is instrumenting the build. In this case we just bail out
+    // immediately and assume that they take care of it.
+    //
+    // Also note that nested jobs (why this might fail) are supported in recent
+    // versions of Windows, but the version of Windows that our bots are running
+    // at least don't support nested job objects.
+    let r = AssignProcessToJobObject(job, GetCurrentProcess());
+    if r == 0 {
+        CloseHandle(job);
+        return
+    }
+
+    // If we've got a parent process (e.g. the python script that called us)
+    // then move ownership of this job object up to them. That way if the python
+    // script is killed (e.g. via ctrl-c) then we'll all be torn down.
+    //
+    // If we don't have a parent (e.g. this was run directly) then we
+    // intentionally leak the job object handle. When our process exits
+    // (normally or abnormally) it will close the handle implicitly, causing all
+    // processes in the job to be cleaned up.
+    let pid = match env::var("BOOTSTRAP_PARENT_ID") {
+        Ok(s) => s,
+        Err(..) => return,
+    };
+
+    let parent = OpenProcess(PROCESS_DUP_HANDLE, FALSE, pid.parse().unwrap());
+    assert!(parent != 0 as *mut _, "{}", io::Error::last_os_error());
+    let mut parent_handle = 0 as *mut _;
+    let r = DuplicateHandle(GetCurrentProcess(), job,
+                            parent, &mut parent_handle,
+                            0, FALSE, DUPLICATE_SAME_ACCESS);
+
+    // If this failed, well at least we tried! An example of DuplicateHandle
+    // failing in the past has been when the wrong python2 package spawned this
+    // build system (e.g. the `python2` package in MSYS instead of
+    // `mingw-w64-x86_64-python2`). Not sure why it failed, but the "failure
+    // mode" here is that we only clean everything up when the build system
+    // dies, not when the python parent does, so not too bad.
+    //
+    // On success the parent now owns the duplicated handle, so close ours:
+    // the job stays alive exactly as long as the parent does.
+    if r != 0 {
+        CloseHandle(job);
+    }
+}
index ef6184d6ca76cf8fc3dbbb6b40e6dd0452a75098..943271fc8a641665734531b3393b32d4f37d1e5e 100644 (file)
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-//! A small helper library shared between the build system's executables
+//! Implementation of rustbuild, the Rust build system.
 //!
-//! Currently this just has some simple utilities for modifying the dynamic
-//! library lookup path.
+//! This module, and its descendants, are the implementation of the Rust build
+//! system. Most of this build system is backed by Cargo but the outer layer
+//! here serves as the ability to orchestrate calling Cargo, sequencing Cargo
+//! builds, building artifacts like LLVM, etc.
+//!
+//! More documentation can be found in each respective module below.
+
+extern crate build_helper;
+extern crate cmake;
+extern crate filetime;
+extern crate gcc;
+extern crate getopts;
+extern crate md5;
+extern crate num_cpus;
+extern crate rustc_serialize;
+extern crate toml;
 
+use std::cell::RefCell;
+use std::collections::HashMap;
 use std::env;
-use std::ffi::OsString;
-use std::path::PathBuf;
-
-/// Returns the environment variable which the dynamic library lookup path
-/// resides in for this platform.
-pub fn dylib_path_var() -> &'static str {
-    if cfg!(target_os = "windows") {
-        "PATH"
-    } else if cfg!(target_os = "macos") {
-        "DYLD_LIBRARY_PATH"
-    } else {
-        "LD_LIBRARY_PATH"
+use std::fs::{self, File};
+use std::path::{PathBuf, Path};
+use std::process::Command;
+
+use build_helper::{run_silent, output};
+
+use util::{exe, mtime, libdir, add_lib_path};
+
+/// A helper macro to `unwrap` a result except also print out details like:
+///
+/// * The file/line of the panic
+/// * The expression that failed
+/// * The error itself
+///
+/// This is currently used judiciously throughout the build system rather than
+/// using a `Result` with `try!`, but this may change one day...
+macro_rules! t {
+    ($e:expr) => (match $e {
+        Ok(e) => e,
+        Err(e) => panic!("{} failed with {}", stringify!($e), e),
+    })
+}
+
+mod cc;
+mod channel;
+mod check;
+mod clean;
+mod compile;
+mod config;
+mod dist;
+mod doc;
+mod flags;
+mod native;
+mod sanity;
+mod step;
+pub mod util;
+
+#[cfg(windows)]
+mod job;
+
+// On non-Windows platforms job objects aren't needed: Ctrl-C already signals
+// the whole process group (see the docs in `job.rs`), so `setup` is a no-op.
+#[cfg(not(windows))]
+mod job {
+    pub unsafe fn setup() {}
+}
+
+pub use config::Config;
+pub use flags::Flags;
+
+/// A structure representing a Rust compiler.
+///
+/// Each compiler has a `stage` that it is associated with and a `host` that
+/// corresponds to the platform the compiler runs on. This structure is used as
+/// a parameter to many methods below.
+#[derive(Eq, PartialEq, Clone, Copy, Hash, Debug)]
+pub struct Compiler<'a> {
+    /// Bootstrap stage (0, 1, ...) this compiler was built in.
+    stage: u32,
+    /// Target triple of the platform this compiler runs on.
+    host: &'a str,
+}
+
+/// Global configuration for the build system.
+///
+/// This structure transitively contains all configuration for the build system.
+/// All filesystem-encoded configuration is in `config`, all flags are in
+/// `flags`, and then parsed or probed information is listed in the keys below.
+///
+/// This structure is a parameter of almost all methods in the build system,
+/// although most functions are implemented as free functions rather than
+/// methods specifically on this structure itself (to make it easier to
+/// organize).
+pub struct Build {
+    // User-specified configuration via config.toml
+    config: Config,
+
+    // User-specified configuration via CLI flags
+    flags: Flags,
+
+    // Derived properties from the above two configurations
+    cargo: PathBuf,     // `cargo` used to drive builds (config or stage0)
+    rustc: PathBuf,     // snapshot `rustc` (config or stage0)
+    src: PathBuf,       // root of the source tree (`--src` or cwd)
+    out: PathBuf,       // root of all build output (`<cwd>/build`)
+    release: String,
+    unstable_features: bool,
+    ver_hash: Option<String>,
+    short_ver_hash: Option<String>,
+    ver_date: Option<String>,
+    version: String,
+    package_vers: String,
+    bootstrap_key: String,
+    bootstrap_key_stage0: String,
+
+    // Probed tools at runtime
+    gdb_version: Option<String>,
+    lldb_version: Option<String>,
+    lldb_python_dir: Option<String>,
+
+    // Runtime state filled in later on
+    cc: HashMap<String, (gcc::Tool, Option<PathBuf>)>,
+    cxx: HashMap<String, gcc::Tool>,
+    compiler_rt_built: RefCell<HashMap<String, PathBuf>>,
+}
+
+/// The various "modes" of invoking Cargo.
+///
+/// These entries currently correspond to the various output directories of the
+/// build system, with each mode generating output in a different directory.
+#[derive(Clone, Copy)]
+pub enum Mode {
+    /// This cargo is going to build the standard library, placing output in the
+    /// "stageN-std" directory.
+    Libstd,
+
+    /// This cargo is going to build libtest, placing output in the
+    /// "stageN-test" directory.
+    Libtest,
+
+    /// This cargo is going to build librustc and compiler libraries, placing
+    /// output in the "stageN-rustc" directory.
+    Librustc,
+
+    /// This cargo is going to build some tool, placing output in the
+    /// "stageN-tools" directory.
+    Tool,
+}
+
+impl Build {
+    /// Creates a new set of build configuration from the `flags` on the command
+    /// line and the filesystem `config`.
+    ///
+    /// By default all build output will be placed in the current directory.
+    pub fn new(flags: Flags, config: Config) -> Build {
+        let cwd = t!(env::current_dir());
+        let src = flags.src.clone().unwrap_or(cwd.clone());
+        let out = cwd.join("build");
+
+        // Unless the configuration explicitly points at a rustc/cargo to use,
+        // default to the stage0 snapshot tools under the build directory.
+        let stage0_root = out.join(&config.build).join("stage0/bin");
+        let rustc = match config.rustc {
+            Some(ref s) => PathBuf::from(s),
+            None => stage0_root.join(exe("rustc", &config.build)),
+        };
+        let cargo = match config.cargo {
+            Some(ref s) => PathBuf::from(s),
+            None => stage0_root.join(exe("cargo", &config.build)),
+        };
+
+        // Everything below `release` starts empty/None and is filled in
+        // later during `build()` (see the channel/cc/sanity steps there).
+        Build {
+            flags: flags,
+            config: config,
+            cargo: cargo,
+            rustc: rustc,
+            src: src,
+            out: out,
+
+            release: String::new(),
+            unstable_features: false,
+            ver_hash: None,
+            short_ver_hash: None,
+            ver_date: None,
+            version: String::new(),
+            bootstrap_key: String::new(),
+            bootstrap_key_stage0: String::new(),
+            package_vers: String::new(),
+            cc: HashMap::new(),
+            cxx: HashMap::new(),
+            compiler_rt_built: RefCell::new(HashMap::new()),
+            gdb_version: None,
+            lldb_version: None,
+            lldb_python_dir: None,
+        }
+    }
+
+    /// Executes the entire build, as configured by the flags and configuration.
+    pub fn build(&mut self) {
+        use step::Source::*;
+
+        // On Windows this ties our process tree to a job object so that the
+        // whole tree is torn down together (see `job.rs`); no-op elsewhere.
+        unsafe {
+            job::setup();
+        }
+
+        // `--clean` is terminal: wipe the output directory and do nothing else.
+        if self.flags.clean {
+            return clean::clean(self);
+        }
+
+        self.verbose("finding compilers");
+        cc::find(self);
+        self.verbose("running sanity check");
+        sanity::check(self);
+        self.verbose("collecting channel variables");
+        channel::collect(self);
+        self.verbose("updating submodules");
+        self.update_submodules();
+
+        // The main loop of the build system.
+        //
+        // The `step::all` function returns a topographically sorted list of all
+        // steps that need to be executed as part of this build. Each step has a
+        // corresponding entry in `step.rs` and indicates some unit of work that
+        // needs to be done as part of the build.
+        //
+        // Almost all of these are simple one-liners that shell out to the
+        // corresponding functionality in the extra modules, where more
+        // documentation can be found.
+        for target in step::all(self) {
+            let doc_out = self.out.join(&target.target).join("doc");
+            match target.src {
+                // Native dependencies built outside of Cargo.
+                Llvm { _dummy } => {
+                    native::llvm(self, target.target);
+                }
+                CompilerRt { _dummy } => {
+                    native::compiler_rt(self, target.target);
+                }
+                TestHelpers { _dummy } => {
+                    native::test_helpers(self, target.target);
+                }
+                // Compiling std/test/rustc and linking them into sysroots.
+                Libstd { compiler } => {
+                    compile::std(self, target.target, &compiler);
+                }
+                Libtest { compiler } => {
+                    compile::test(self, target.target, &compiler);
+                }
+                Librustc { compiler } => {
+                    compile::rustc(self, target.target, &compiler);
+                }
+                LibstdLink { compiler, host } => {
+                    compile::std_link(self, target.target, &compiler, host);
+                }
+                LibtestLink { compiler, host } => {
+                    compile::test_link(self, target.target, &compiler, host);
+                }
+                LibrustcLink { compiler, host } => {
+                    compile::rustc_link(self, target.target, &compiler, host);
+                }
+                Rustc { stage: 0 } => {
+                    // nothing to do...
+                }
+                Rustc { stage } => {
+                    compile::assemble_rustc(self, stage, target.target);
+                }
+                // Building the in-tree helper tools.
+                ToolLinkchecker { stage } => {
+                    compile::tool(self, stage, target.target, "linkchecker");
+                }
+                ToolRustbook { stage } => {
+                    compile::tool(self, stage, target.target, "rustbook");
+                }
+                ToolErrorIndex { stage } => {
+                    compile::tool(self, stage, target.target,
+                                  "error_index_generator");
+                }
+                ToolCargoTest { stage } => {
+                    compile::tool(self, stage, target.target, "cargotest");
+                }
+                ToolTidy { stage } => {
+                    compile::tool(self, stage, target.target, "tidy");
+                }
+                ToolCompiletest { stage } => {
+                    compile::tool(self, stage, target.target, "compiletest");
+                }
+                // Generating documentation.
+                DocBook { stage } => {
+                    doc::rustbook(self, stage, target.target, "book", &doc_out);
+                }
+                DocNomicon { stage } => {
+                    doc::rustbook(self, stage, target.target, "nomicon",
+                                  &doc_out);
+                }
+                DocStyle { stage } => {
+                    doc::rustbook(self, stage, target.target, "style",
+                                  &doc_out);
+                }
+                DocStandalone { stage } => {
+                    doc::standalone(self, stage, target.target, &doc_out);
+                }
+                DocStd { stage } => {
+                    doc::std(self, stage, target.target, &doc_out);
+                }
+                DocTest { stage } => {
+                    doc::test(self, stage, target.target, &doc_out);
+                }
+                DocRustc { stage } => {
+                    doc::rustc(self, stage, target.target, &doc_out);
+                }
+                DocErrorIndex { stage } => {
+                    doc::error_index(self, stage, target.target, &doc_out);
+                }
+
+                // Running the various test suites.
+                CheckLinkcheck { stage } => {
+                    check::linkcheck(self, stage, target.target);
+                }
+                CheckCargoTest { stage } => {
+                    check::cargotest(self, stage, target.target);
+                }
+                CheckTidy { stage } => {
+                    check::tidy(self, stage, target.target);
+                }
+                CheckRPass { compiler } => {
+                    check::compiletest(self, &compiler, target.target,
+                                       "run-pass", "run-pass");
+                }
+                CheckRPassFull { compiler } => {
+                    check::compiletest(self, &compiler, target.target,
+                                       "run-pass", "run-pass-fulldeps");
+                }
+                CheckCFail { compiler } => {
+                    check::compiletest(self, &compiler, target.target,
+                                       "compile-fail", "compile-fail");
+                }
+                CheckCFailFull { compiler } => {
+                    check::compiletest(self, &compiler, target.target,
+                                       "compile-fail", "compile-fail-fulldeps")
+                }
+                CheckPFail { compiler } => {
+                    check::compiletest(self, &compiler, target.target,
+                                       "parse-fail", "parse-fail");
+                }
+                CheckRFail { compiler } => {
+                    check::compiletest(self, &compiler, target.target,
+                                       "run-fail", "run-fail");
+                }
+                CheckRFailFull { compiler } => {
+                    check::compiletest(self, &compiler, target.target,
+                                       "run-fail", "run-fail-fulldeps");
+                }
+                CheckPretty { compiler } => {
+                    check::compiletest(self, &compiler, target.target,
+                                       "pretty", "pretty");
+                }
+                CheckPrettyRPass { compiler } => {
+                    check::compiletest(self, &compiler, target.target,
+                                       "pretty", "run-pass");
+                }
+                CheckPrettyRPassFull { compiler } => {
+                    check::compiletest(self, &compiler, target.target,
+                                       "pretty", "run-pass-fulldeps");
+                }
+                CheckPrettyRFail { compiler } => {
+                    check::compiletest(self, &compiler, target.target,
+                                       "pretty", "run-fail");
+                }
+                CheckPrettyRFailFull { compiler } => {
+                    check::compiletest(self, &compiler, target.target,
+                                       "pretty", "run-fail-fulldeps");
+                }
+                CheckPrettyRPassValgrind { compiler } => {
+                    check::compiletest(self, &compiler, target.target,
+                                       "pretty", "run-pass-valgrind");
+                }
+                CheckCodegen { compiler } => {
+                    check::compiletest(self, &compiler, target.target,
+                                       "codegen", "codegen");
+                }
+                CheckCodegenUnits { compiler } => {
+                    check::compiletest(self, &compiler, target.target,
+                                       "codegen-units", "codegen-units");
+                }
+                CheckIncremental { compiler } => {
+                    check::compiletest(self, &compiler, target.target,
+                                       "incremental", "incremental");
+                }
+                CheckUi { compiler } => {
+                    check::compiletest(self, &compiler, target.target,
+                                       "ui", "ui");
+                }
+                CheckDebuginfo { compiler } => {
+                    // Debuginfo tests use lldb on Apple platforms, gdb
+                    // elsewhere, and are skipped entirely on MSVC.
+                    if target.target.contains("msvc") {
+                        // nothing to do
+                    } else if target.target.contains("apple") {
+                        check::compiletest(self, &compiler, target.target,
+                                           "debuginfo-lldb", "debuginfo");
+                    } else {
+                        check::compiletest(self, &compiler, target.target,
+                                           "debuginfo-gdb", "debuginfo");
+                    }
+                }
+                CheckRustdoc { compiler } => {
+                    check::compiletest(self, &compiler, target.target,
+                                       "rustdoc", "rustdoc");
+                }
+                CheckRPassValgrind { compiler } => {
+                    check::compiletest(self, &compiler, target.target,
+                                       "run-pass-valgrind", "run-pass-valgrind");
+                }
+                CheckDocs { compiler } => {
+                    check::docs(self, &compiler);
+                }
+                CheckErrorIndex { compiler } => {
+                    check::error_index(self, &compiler);
+                }
+                CheckRMake { compiler } => {
+                    check::compiletest(self, &compiler, target.target,
+                                       "run-make", "run-make")
+                }
+                CheckCrateStd { compiler } => {
+                    check::krate(self, &compiler, target.target, Mode::Libstd)
+                }
+                CheckCrateTest { compiler } => {
+                    check::krate(self, &compiler, target.target, Mode::Libtest)
+                }
+                CheckCrateRustc { compiler } => {
+                    check::krate(self, &compiler, target.target, Mode::Librustc)
+                }
+
+                // Packaging up distribution artifacts.
+                DistDocs { stage } => dist::docs(self, stage, target.target),
+                DistMingw { _dummy } => dist::mingw(self, target.target),
+                DistRustc { stage } => dist::rustc(self, stage, target.target),
+                DistStd { compiler } => dist::std(self, &compiler, target.target),
+
+                DebuggerScripts { stage } => {
+                    let compiler = Compiler::new(stage, target.target);
+                    dist::debugger_scripts(self,
+                                           &self.sysroot(&compiler),
+                                           target.target);
+                }
+
+                AndroidCopyLibs { compiler } => {
+                    check::android_copy_libs(self, &compiler, target.target);
+                }
+
+                // pseudo-steps
+                Dist { .. } |
+                Doc { .. } |
+                CheckTarget { .. } |
+                Check { .. } => {}
+            }
+        }
+    }
+
+    /// Updates all git submodules that we have.
+    ///
+    /// This will detect if any submodules are out of date and run the necessary
+    /// commands to sync them all with upstream.
+    fn update_submodules(&self) {
+        if !self.config.submodules {
+            return
+        }
+        // Not a git checkout (e.g. a source tarball): nothing to update.
+        if fs::metadata(self.src.join(".git")).is_err() {
+            return
+        }
+        let git_submodule = || {
+            let mut cmd = Command::new("git");
+            cmd.current_dir(&self.src).arg("submodule");
+            return cmd
+        };
+
+        // FIXME: this takes a seriously long time to execute on Windows and a
+        //        nontrivial amount of time on Unix, we should have a better way
+        //        of detecting whether we need to run all the submodule commands
+        //        below.
+        //
+        // `git submodule status` prefixes out-of-sync entries with '+' or '-';
+        // if no line carries either prefix everything is in sync and the
+        // expensive steps below can be skipped.
+        let out = output(git_submodule().arg("status"));
+        if !out.lines().any(|l| l.starts_with("+") || l.starts_with("-")) {
+            return
+        }
+
+        self.run(git_submodule().arg("sync"));
+        self.run(git_submodule().arg("init"));
+        self.run(git_submodule().arg("update"));
+        self.run(git_submodule().arg("update").arg("--recursive"));
+        self.run(git_submodule().arg("status").arg("--recursive"));
+        self.run(git_submodule().arg("foreach").arg("--recursive")
+                                .arg("git").arg("clean").arg("-fdx"));
+        self.run(git_submodule().arg("foreach").arg("--recursive")
+                                .arg("git").arg("checkout").arg("."));
+    }
+
+    /// Clear out `dir` if `input` is newer.
+    ///
+    /// After this executes, it will also ensure that `dir` exists.
+    fn clear_if_dirty(&self, dir: &Path, input: &Path) {
+        // The `.stamp` file's mtime records when `dir` was last refreshed;
+        // an `input` newer than the stamp means the contents are stale.
+        let stamp = dir.join(".stamp");
+        if mtime(&stamp) < mtime(input) {
+            self.verbose(&format!("Dirty - {}", dir.display()));
+            let _ = fs::remove_dir_all(dir);
+        }
+        t!(fs::create_dir_all(dir));
+        // Recreate the stamp so the next call compares against "now".
+        t!(File::create(stamp));
+    }
+
+    /// Prepares an invocation of `cargo` to be run.
+    ///
+    /// This will create a `Command` that represents a pending execution of
+    /// Cargo. This cargo will be configured to use `compiler` as the actual
+    /// rustc compiler, its output will be scoped by `mode`'s output directory,
+    /// it will pass the `--target` flag for the specified `target`, and will be
+    /// executing the Cargo command `cmd`.
+    fn cargo(&self,
+             compiler: &Compiler,
+             mode: Mode,
+             target: &str,
+             cmd: &str) -> Command {
+        let mut cargo = Command::new(&self.cargo);
+        let out_dir = self.stage_out(compiler, mode);
+        cargo.env("CARGO_TARGET_DIR", out_dir)
+             .arg(cmd)
+             .arg("-j").arg(self.jobs().to_string())
+             .arg("--target").arg(target);
+
+        let stage;
+        if compiler.stage == 0 && self.config.local_rebuild {
+            // Assume the local-rebuild rustc already has stage1 features.
+            stage = 1;
+        } else {
+            stage = compiler.stage;
+        }
+
+        // Customize the compiler we're running. Specify the compiler to cargo
+        // as our shim and then pass it some various options used to configure
+        // how the actual compiler itself is called.
+        //
+        // These variables are primarily all read by
+        // src/bootstrap/{rustc,rustdoc.rs}
+        cargo.env("RUSTC", self.out.join("bootstrap/debug/rustc"))
+             .env("RUSTC_REAL", self.compiler_path(compiler))
+             .env("RUSTC_STAGE", stage.to_string())
+             .env("RUSTC_DEBUGINFO", self.config.rust_debuginfo.to_string())
+             .env("RUSTC_CODEGEN_UNITS",
+                  self.config.rust_codegen_units.to_string())
+             .env("RUSTC_DEBUG_ASSERTIONS",
+                  self.config.rust_debug_assertions.to_string())
+             .env("RUSTC_SNAPSHOT", &self.rustc)
+             .env("RUSTC_SYSROOT", self.sysroot(compiler))
+             .env("RUSTC_LIBDIR", self.rustc_libdir(compiler))
+             .env("RUSTC_SNAPSHOT_LIBDIR", self.rustc_snapshot_libdir())
+             .env("RUSTC_RPATH", self.config.rust_rpath.to_string())
+             .env("RUSTDOC", self.out.join("bootstrap/debug/rustdoc"))
+             .env("RUSTDOC_REAL", self.rustdoc(compiler))
+             .env("RUSTC_FLAGS", self.rustc_flags(target).join(" "));
+
+        self.add_bootstrap_key(compiler, &mut cargo);
+
+        // Specify some various options for build scripts used throughout
+        // the build.
+        //
+        // FIXME: the guard against msvc shouldn't need to be here
+        if !target.contains("msvc") {
+            cargo.env(format!("CC_{}", target), self.cc(target))
+                 .env(format!("AR_{}", target), self.ar(target).unwrap()) // only msvc is None
+                 .env(format!("CFLAGS_{}", target), self.cflags(target).join(" "));
+        }
+
+        // If we're building for OSX, inform the compiler and the linker that
+        // we want to build a compiler runnable on 10.7
+        if target.contains("apple-darwin") {
+            cargo.env("MACOSX_DEPLOYMENT_TARGET", "10.7");
+        }
+
+        // Environment variables *required* throughout the build
+        //
+        // FIXME: should update code to not require this env var
+        cargo.env("CFG_COMPILER_HOST_TRIPLE", target);
+
+        if self.config.verbose || self.flags.verbose {
+            cargo.arg("-v");
+        }
+        if self.config.rust_optimize {
+            cargo.arg("--release");
+        }
+        return cargo
+    }
+
+    /// Get a path to the compiler specified.
+    fn compiler_path(&self, compiler: &Compiler) -> PathBuf {
+        if compiler.is_snapshot(self) {
+            self.rustc.clone()
+        } else {
+            self.sysroot(compiler).join("bin").join(exe("rustc", compiler.host))
+        }
+    }
+
+    /// Get the specified tool built by the specified compiler
+    fn tool(&self, compiler: &Compiler, tool: &str) -> PathBuf {
+        self.cargo_out(compiler, Mode::Tool, compiler.host)
+            .join(exe(tool, compiler.host))
+    }
+
+    /// Get the `rustdoc` executable next to the specified compiler
+    fn rustdoc(&self, compiler: &Compiler) -> PathBuf {
+        let mut rustdoc = self.compiler_path(compiler);
+        rustdoc.pop();
+        rustdoc.push(exe("rustdoc", compiler.host));
+        return rustdoc
+    }
+
+    /// Get a `Command` which is ready to run `tool` in `stage` built for
+    /// `host`.
+    fn tool_cmd(&self, compiler: &Compiler, tool: &str) -> Command {
+        let mut cmd = Command::new(self.tool(&compiler, tool));
+        let host = compiler.host;
+        let paths = vec![
+            self.cargo_out(compiler, Mode::Libstd, host).join("deps"),
+            self.cargo_out(compiler, Mode::Libtest, host).join("deps"),
+            self.cargo_out(compiler, Mode::Librustc, host).join("deps"),
+            self.cargo_out(compiler, Mode::Tool, host).join("deps"),
+        ];
+        add_lib_path(paths, &mut cmd);
+        return cmd
+    }
+
+    /// Get the space-separated set of activated features for the standard
+    /// library.
+    fn std_features(&self) -> String {
+        let mut features = String::new();
+        if self.config.debug_jemalloc {
+            features.push_str(" debug-jemalloc");
+        }
+        if self.config.use_jemalloc {
+            features.push_str(" jemalloc");
+        }
+        return features
+    }
+
+    /// Get the space-separated set of activated features for the compiler.
+    fn rustc_features(&self) -> String {
+        let mut features = String::new();
+        if self.config.use_jemalloc {
+            features.push_str(" jemalloc");
+        }
+        return features
+    }
+
+    /// Component directory that Cargo will produce output into (e.g.
+    /// release/debug)
+    fn cargo_dir(&self) -> &'static str {
+        if self.config.rust_optimize {"release"} else {"debug"}
+    }
+
+    /// Returns the sysroot for the `compiler` specified that *this build system
+    /// generates*.
+    ///
+    /// That is, the sysroot for the stage0 compiler is not what the compiler
+    /// thinks it is by default, but it's the same as the default for stages
+    /// 1-3.
+    fn sysroot(&self, compiler: &Compiler) -> PathBuf {
+        if compiler.stage == 0 {
+            self.out.join(compiler.host).join("stage0-sysroot")
+        } else {
+            self.out.join(compiler.host).join(format!("stage{}", compiler.stage))
+        }
+    }
+
+    /// Returns the libdir where the standard library and other artifacts are
+    /// found for a compiler's sysroot.
+    fn sysroot_libdir(&self, compiler: &Compiler, target: &str) -> PathBuf {
+        self.sysroot(compiler).join("lib").join("rustlib")
+            .join(target).join("lib")
+    }
+
+    /// Returns the root directory for all output generated in a particular
+    /// stage when running with a particular host compiler.
+    ///
+    /// The mode indicates what the root directory is for.
+    fn stage_out(&self, compiler: &Compiler, mode: Mode) -> PathBuf {
+        let suffix = match mode {
+            Mode::Libstd => "-std",
+            Mode::Libtest => "-test",
+            Mode::Tool => "-tools",
+            Mode::Librustc => "-rustc",
+        };
+        self.out.join(compiler.host)
+                .join(format!("stage{}{}", compiler.stage, suffix))
+    }
+
+    /// Returns the root output directory for all Cargo output in a given stage,
+    /// running a particular compiler, whether or not we're building the
+    /// standard library, and targeting the specified architecture.
+    fn cargo_out(&self,
+                 compiler: &Compiler,
+                 mode: Mode,
+                 target: &str) -> PathBuf {
+        self.stage_out(compiler, mode).join(target).join(self.cargo_dir())
+    }
+
+    /// Root output directory for LLVM compiled for `target`
+    ///
+    /// Note that if LLVM is configured externally then the directory returned
+    /// will likely be empty.
+    fn llvm_out(&self, target: &str) -> PathBuf {
+        self.out.join(target).join("llvm")
+    }
+
+    /// Returns the path to `llvm-config` for the specified target.
+    ///
+    /// If a custom `llvm-config` was specified for target then that's returned
+    /// instead.
+    fn llvm_config(&self, target: &str) -> PathBuf {
+        let target_config = self.config.target_config.get(target);
+        if let Some(s) = target_config.and_then(|c| c.llvm_config.as_ref()) {
+            s.clone()
+        } else {
+            self.llvm_out(&self.config.build).join("bin")
+                .join(exe("llvm-config", target))
+        }
+    }
+
+    /// Returns the path to `FileCheck` binary for the specified target
+    fn llvm_filecheck(&self, target: &str) -> PathBuf {
+        let target_config = self.config.target_config.get(target);
+        if let Some(s) = target_config.and_then(|c| c.llvm_config.as_ref()) {
+            s.parent().unwrap().join(exe("FileCheck", target))
+        } else {
+            let base = self.llvm_out(&self.config.build).join("build");
+            let exe = exe("FileCheck", target);
+            if self.config.build.contains("msvc") {
+                base.join("Release/bin").join(exe)
+            } else {
+                base.join("bin").join(exe)
+            }
+        }
+    }
+
+    /// Root output directory for compiler-rt compiled for `target`
+    fn compiler_rt_out(&self, target: &str) -> PathBuf {
+        self.out.join(target).join("compiler-rt")
+    }
+
+    /// Root output directory for rust_test_helpers library compiled for
+    /// `target`
+    fn test_helpers_out(&self, target: &str) -> PathBuf {
+        self.out.join(target).join("rust-test-helpers")
+    }
+
+    /// Adds the compiler's directory of dynamic libraries to `cmd`'s dynamic
+    /// library lookup path.
+    fn add_rustc_lib_path(&self, compiler: &Compiler, cmd: &mut Command) {
+        // Windows doesn't need dylib path munging because the dlls for the
+        // compiler live next to the compiler and the system will find them
+        // automatically.
+        if cfg!(windows) {
+            return
+        }
+
+        add_lib_path(vec![self.rustc_libdir(compiler)], cmd);
+    }
+
+    /// Adds the compiler's bootstrap key to the environment of `cmd`.
+    fn add_bootstrap_key(&self, compiler: &Compiler, cmd: &mut Command) {
+        // In stage0 we're using a previously released stable compiler, so we
+        // use the stage0 bootstrap key. Otherwise we use our own build's
+        // bootstrap key.
+        let bootstrap_key = if compiler.is_snapshot(self) && !self.config.local_rebuild {
+            &self.bootstrap_key_stage0
+        } else {
+            &self.bootstrap_key
+        };
+        cmd.env("RUSTC_BOOTSTRAP_KEY", bootstrap_key);
+    }
+
+    /// Returns the compiler's libdir where it stores the dynamic libraries that
+    /// it itself links against.
+    ///
+    /// For example this returns `<sysroot>/lib` on Unix and `<sysroot>/bin` on
+    /// Windows.
+    fn rustc_libdir(&self, compiler: &Compiler) -> PathBuf {
+        if compiler.is_snapshot(self) {
+            self.rustc_snapshot_libdir()
+        } else {
+            self.sysroot(compiler).join(libdir(compiler.host))
+        }
+    }
+
+    /// Returns the libdir of the snapshot compiler.
+    fn rustc_snapshot_libdir(&self) -> PathBuf {
+        self.rustc.parent().unwrap().parent().unwrap()
+            .join(libdir(&self.config.build))
+    }
+
+    /// Runs a command, printing out nice contextual information if it fails.
+    fn run(&self, cmd: &mut Command) {
+        self.verbose(&format!("running: {:?}", cmd));
+        run_silent(cmd)
+    }
+
+    /// Prints a message if this build is configured in verbose mode.
+    fn verbose(&self, msg: &str) {
+        if self.flags.verbose || self.config.verbose {
+            println!("{}", msg);
+        }
+    }
+
+    /// Returns the number of parallel jobs that have been configured for this
+    /// build.
+    fn jobs(&self) -> u32 {
+        self.flags.jobs.unwrap_or(num_cpus::get() as u32)
+    }
+
+    /// Returns the path to the C compiler for the target specified.
+    fn cc(&self, target: &str) -> &Path {
+        self.cc[target].0.path()
+    }
+
+    /// Returns a list of flags to pass to the C compiler for the target
+    /// specified.
+    fn cflags(&self, target: &str) -> Vec<String> {
+        // Filter out -O and /O (the optimization flags) that we picked up from
+        // gcc-rs because the build scripts will determine that for themselves.
+        let mut base = self.cc[target].0.args().iter()
+                           .map(|s| s.to_string_lossy().into_owned())
+                           .filter(|s| !s.starts_with("-O") && !s.starts_with("/O"))
+                           .collect::<Vec<_>>();
+
+        // If we're compiling on OSX then we add a few unconditional flags
+        // indicating that we want libc++ (more filled out than libstdc++) and
+        // we want to compile for 10.7. This way we can ensure that
+        // LLVM/jemalloc/etc are all properly compiled.
+        if target.contains("apple-darwin") {
+            base.push("-stdlib=libc++".into());
+            base.push("-mmacosx-version-min=10.7".into());
+        }
+        return base
+    }
+
+    /// Returns the path to the `ar` archive utility for the target specified.
+    fn ar(&self, target: &str) -> Option<&Path> {
+        self.cc[target].1.as_ref().map(|p| &**p)
+    }
+
+    /// Returns the path to the C++ compiler for the target specified, may panic
+    /// if no C++ compiler was configured for the target.
+    fn cxx(&self, target: &str) -> &Path {
+        self.cxx[target].path()
+    }
+
+    /// Returns flags to pass to the compiler to generate code for `target`.
+    fn rustc_flags(&self, target: &str) -> Vec<String> {
+        // New flags should be added here with great caution!
+        //
+        // It's quite unfortunate to **require** flags to generate code for a
+        // target, so it should only be passed here if absolutely necessary!
+        // Most default configuration should be done through target specs rather
+        // than an entry here.
+
+        let mut base = Vec::new();
+        if target != self.config.build && !target.contains("msvc") {
+            base.push(format!("-Clinker={}", self.cc(target).display()));
+        }
+        return base
     }
 }
 
-/// Parses the `dylib_path_var()` environment variable, returning a list of
-/// paths that are members of this lookup path.
-pub fn dylib_path() -> Vec<PathBuf> {
-    env::split_paths(&env::var_os(dylib_path_var()).unwrap_or(OsString::new()))
-        .collect()
+impl<'a> Compiler<'a> {
+    /// Creates a new compiler for the specified stage/host
+    fn new(stage: u32, host: &'a str) -> Compiler<'a> {
+        Compiler { stage: stage, host: host }
+    }
+
+    /// Returns whether this is a snapshot compiler for `build`'s configuration
+    fn is_snapshot(&self, build: &Build) -> bool {
+        self.stage == 0 && self.host == build.config.build
+    }
 }
diff --git a/src/bootstrap/main.rs b/src/bootstrap/main.rs
deleted file mode 100644 (file)
index 18d03b5..0000000
+++ /dev/null
@@ -1,49 +0,0 @@
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! rustbuild, the Rust build system
-//!
-//! This is the entry point for the build system used to compile the `rustc`
-//! compiler. Lots of documentation can be found in the `README.md` file next to
-//! this file, and otherwise documentation can be found throughout the `build`
-//! directory in each respective module.
-
-#![deny(warnings)]
-
-extern crate bootstrap;
-extern crate build_helper;
-extern crate cmake;
-extern crate filetime;
-extern crate gcc;
-extern crate getopts;
-extern crate libc;
-extern crate num_cpus;
-extern crate rustc_serialize;
-extern crate toml;
-extern crate md5;
-
-use std::env;
-
-use build::{Flags, Config, Build};
-
-mod build;
-
-fn main() {
-    let args = env::args().skip(1).collect::<Vec<_>>();
-    let flags = Flags::parse(&args);
-    let mut config = Config::parse(&flags.build, flags.config.clone());
-
-    // compat with `./configure` while we're still using that
-    if std::fs::metadata("config.mk").is_ok() {
-        config.update_with_config_mk();
-    }
-
-    Build::new(flags, config).build();
-}
diff --git a/src/bootstrap/native.rs b/src/bootstrap/native.rs
new file mode 100644 (file)
index 0000000..83e9393
--- /dev/null
@@ -0,0 +1,238 @@
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Compilation of native dependencies like LLVM.
+//!
+//! Native projects like LLVM unfortunately aren't suited just yet for
+//! compilation in build scripts that Cargo has. This is because the
+//! compilation takes a *very* long time but also because we don't want to
+//! compile LLVM 3 times as part of a normal bootstrap (we want it cached).
+//!
+//! LLVM and compiler-rt are essentially just wired up to everything else to
+//! ensure that they're always in place if needed.
+
+use std::path::Path;
+use std::process::Command;
+use std::fs::{self, File};
+
+use build_helper::output;
+use cmake;
+use gcc;
+
+use Build;
+use util::{staticlib, up_to_date};
+
+/// Compile LLVM for `target`.
+pub fn llvm(build: &Build, target: &str) {
+    // If we're using a custom LLVM bail out here, but we can only use a
+    // custom LLVM for the build triple.
+    if let Some(config) = build.config.target_config.get(target) {
+        if let Some(ref s) = config.llvm_config {
+            return check_llvm_version(build, s);
+        }
+    }
+
+    // If the cleaning trigger is newer than our built artifacts (or if the
+    // artifacts are missing) then we keep going, otherwise we bail out.
+    let dst = build.llvm_out(target);
+    let stamp = build.src.join("src/rustllvm/llvm-auto-clean-trigger");
+    let done_stamp = dst.join("llvm-finished-building");
+    build.clear_if_dirty(&dst, &stamp);
+    if fs::metadata(&done_stamp).is_ok() {
+        return
+    }
+
+    println!("Building LLVM for {}", target);
+
+    let _ = fs::remove_dir_all(&dst.join("build"));
+    t!(fs::create_dir_all(&dst.join("build")));
+    let assertions = if build.config.llvm_assertions {"ON"} else {"OFF"};
+
+    // http://llvm.org/docs/CMake.html
+    let mut cfg = cmake::Config::new(build.src.join("src/llvm"));
+    if build.config.ninja {
+        cfg.generator("Ninja");
+    }
+    cfg.target(target)
+       .host(&build.config.build)
+       .out_dir(&dst)
+       .profile(if build.config.llvm_optimize {"Release"} else {"Debug"})
+       .define("LLVM_ENABLE_ASSERTIONS", assertions)
+       .define("LLVM_TARGETS_TO_BUILD", "X86;ARM;AArch64;Mips;PowerPC")
+       .define("LLVM_INCLUDE_EXAMPLES", "OFF")
+       .define("LLVM_INCLUDE_TESTS", "OFF")
+       .define("LLVM_INCLUDE_DOCS", "OFF")
+       .define("LLVM_ENABLE_ZLIB", "OFF")
+       .define("WITH_POLLY", "OFF")
+       .define("LLVM_ENABLE_TERMINFO", "OFF")
+       .define("LLVM_ENABLE_LIBEDIT", "OFF")
+       .define("LLVM_PARALLEL_COMPILE_JOBS", build.jobs().to_string());
+
+    if target.starts_with("i686") {
+        cfg.define("LLVM_BUILD_32_BITS", "ON");
+    }
+
+    // http://llvm.org/docs/HowToCrossCompileLLVM.html
+    if target != build.config.build {
+        // FIXME: if the llvm root for the build triple is overridden then we
+        //        should use llvm-tblgen from there, also should verify that it
+        //        actually exists most of the time in normal installs of LLVM.
+        let host = build.llvm_out(&build.config.build).join("bin/llvm-tblgen");
+        cfg.define("CMAKE_CROSSCOMPILING", "True")
+           .define("LLVM_TARGET_ARCH", target.split('-').next().unwrap())
+           .define("LLVM_TABLEGEN", &host)
+           .define("LLVM_DEFAULT_TARGET_TRIPLE", target);
+    }
+
+    // MSVC handles compiler business itself
+    if !target.contains("msvc") {
+        if build.config.ccache {
+           cfg.define("CMAKE_C_COMPILER", "ccache")
+              .define("CMAKE_C_COMPILER_ARG1", build.cc(target))
+              .define("CMAKE_CXX_COMPILER", "ccache")
+              .define("CMAKE_CXX_COMPILER_ARG1", build.cxx(target));
+        } else {
+           cfg.define("CMAKE_C_COMPILER", build.cc(target))
+              .define("CMAKE_CXX_COMPILER", build.cxx(target));
+        }
+        cfg.build_arg("-j").build_arg(build.jobs().to_string());
+
+        cfg.define("CMAKE_C_FLAGS", build.cflags(target).join(" "));
+        cfg.define("CMAKE_CXX_FLAGS", build.cflags(target).join(" "));
+    }
+
+    // FIXME: we don't actually need to build all LLVM tools and all LLVM
+    //        libraries here, e.g. we just want a few components and a few
+    //        tools. Figure out how to filter them down and only build the right
+    //        tools and libs on all platforms.
+    cfg.build();
+
+    t!(File::create(&done_stamp));
+}
+
+fn check_llvm_version(build: &Build, llvm_config: &Path) {
+    if !build.config.llvm_version_check {
+        return
+    }
+
+    let mut cmd = Command::new(llvm_config);
+    let version = output(cmd.arg("--version"));
+    if version.starts_with("3.5") || version.starts_with("3.6") ||
+       version.starts_with("3.7") {
+        return
+    }
+    panic!("\n\nbad LLVM version: {}, need >=3.5\n\n", version)
+}
+
+/// Compiles the `compiler-rt` library, or at least the builtins part of it.
+///
+/// This uses the CMake build system and an existing LLVM build directory to
+/// compile the project.
+pub fn compiler_rt(build: &Build, target: &str) {
+    let dst = build.compiler_rt_out(target);
+    let arch = target.split('-').next().unwrap();
+    let mode = if build.config.rust_optimize {"Release"} else {"Debug"};
+
+    let build_llvm_config = build.llvm_config(&build.config.build);
+    let mut cfg = cmake::Config::new(build.src.join("src/compiler-rt"));
+    cfg.target(target)
+       .host(&build.config.build)
+       .out_dir(&dst)
+       .profile(mode)
+       .define("LLVM_CONFIG_PATH", build_llvm_config)
+       .define("COMPILER_RT_DEFAULT_TARGET_TRIPLE", target)
+       .define("COMPILER_RT_BUILD_SANITIZERS", "OFF")
+       .define("COMPILER_RT_BUILD_EMUTLS", "OFF")
+       // inform about c/c++ compilers, the c++ compiler isn't actually used but
+       // it's needed to get the initial configure to work on all platforms.
+       .define("CMAKE_C_COMPILER", build.cc(target))
+       .define("CMAKE_CXX_COMPILER", build.cc(target));
+
+    let (dir, build_target, libname) = if target.contains("linux") ||
+                                          target.contains("freebsd") ||
+                                          target.contains("netbsd") {
+        let os_extra = if target.contains("android") && target.contains("arm") {
+            "-android"
+        } else {
+            ""
+        };
+        let builtins_arch = match arch {
+            "i586" => "i386",
+            "arm" | "armv7" if target.contains("android") => "armhf",
+            "arm" if target.contains("eabihf") => "armhf",
+            _ => arch,
+        };
+        let target = format!("clang_rt.builtins-{}", builtins_arch);
+        ("linux".to_string(),
+         target.clone(),
+         format!("{}{}", target, os_extra))
+    } else if target.contains("apple-darwin") {
+        let builtins_arch = match arch {
+            "i686" => "i386",
+            _ => arch,
+        };
+        let target = format!("clang_rt.builtins_{}_osx", builtins_arch);
+        ("builtins".to_string(), target.clone(), target)
+    } else if target.contains("apple-ios") {
+        cfg.define("COMPILER_RT_ENABLE_IOS", "ON");
+        let target = match arch {
+            "armv7s" => "hard_pic_armv7em_macho_embedded".to_string(),
+            "aarch64" => "builtins_arm64_ios".to_string(),
+            _ => format!("hard_pic_{}_macho_embedded", arch),
+        };
+        ("builtins".to_string(), target.clone(), target)
+    } else if target.contains("windows-gnu") {
+        let target = format!("clang_rt.builtins-{}", arch);
+        ("windows".to_string(), target.clone(), target)
+    } else if target.contains("windows-msvc") {
+        let builtins_arch = match arch {
+            "i586" | "i686" => "i386",
+            _ => arch,
+        };
+        (format!("windows/{}", mode),
+         "lib/builtins/builtins".to_string(),
+         format!("clang_rt.builtins-{}", builtins_arch))
+    } else {
+        panic!("can't get os from target: {}", target)
+    };
+    let output = dst.join("build/lib").join(dir)
+                    .join(staticlib(&libname, target));
+    build.compiler_rt_built.borrow_mut().insert(target.to_string(),
+                                                output.clone());
+    if fs::metadata(&output).is_ok() {
+        return
+    }
+    let _ = fs::remove_dir_all(&dst);
+    t!(fs::create_dir_all(&dst));
+    cfg.build_target(&build_target);
+    cfg.build();
+}
+
+/// Compiles the `rust_test_helpers.c` library which we used in various
+/// `run-pass` test suites for ABI testing.
+pub fn test_helpers(build: &Build, target: &str) {
+    let dst = build.test_helpers_out(target);
+    let src = build.src.join("src/rt/rust_test_helpers.c");
+    if up_to_date(&src, &dst.join("librust_test_helpers.a")) {
+        return
+    }
+
+    println!("Building test helpers");
+    t!(fs::create_dir_all(&dst));
+    let mut cfg = gcc::Config::new();
+    cfg.cargo_metadata(false)
+       .out_dir(&dst)
+       .target(target)
+       .host(&build.config.build)
+       .opt_level(0)
+       .debug(false)
+       .file(build.src.join("src/rt/rust_test_helpers.c"))
+       .compile("librust_test_helpers.a");
+}
diff --git a/src/bootstrap/rustc.rs b/src/bootstrap/rustc.rs
deleted file mode 100644 (file)
index 97deced..0000000
+++ /dev/null
@@ -1,164 +0,0 @@
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Shim which is passed to Cargo as "rustc" when running the bootstrap.
-//!
-//! This shim will take care of some various tasks that our build process
-//! requires that Cargo can't quite do through normal configuration:
-//!
-//! 1. When compiling build scripts and build dependencies, we need a guaranteed
-//!    full standard library available. The only compiler which actually has
-//!    this is the snapshot, so we detect this situation and always compile with
-//!    the snapshot compiler.
-//! 2. We pass a bunch of `--cfg` and other flags based on what we're compiling
-//!    (and this slightly differs based on a whether we're using a snapshot or
-//!    not), so we do that all here.
-//!
-//! This may one day be replaced by RUSTFLAGS, but the dynamic nature of
-//! switching compilers for the bootstrap and for build scripts will probably
-//! never get replaced.
-
-extern crate bootstrap;
-
-use std::env;
-use std::ffi::OsString;
-use std::path::PathBuf;
-use std::process::Command;
-
-fn main() {
-    let args = env::args_os().skip(1).collect::<Vec<_>>();
-    // Detect whether or not we're a build script depending on whether --target
-    // is passed (a bit janky...)
-    let target = args.windows(2).find(|w| &*w[0] == "--target")
-                                .and_then(|w| w[1].to_str());
-
-    // Build scripts always use the snapshot compiler which is guaranteed to be
-    // able to produce an executable, whereas intermediate compilers may not
-    // have the standard library built yet and may not be able to produce an
-    // executable. Otherwise we just use the standard compiler we're
-    // bootstrapping with.
-    let (rustc, libdir) = if target.is_none() {
-        ("RUSTC_SNAPSHOT", "RUSTC_SNAPSHOT_LIBDIR")
-    } else {
-        ("RUSTC_REAL", "RUSTC_LIBDIR")
-    };
-    let stage = env::var("RUSTC_STAGE").unwrap();
-
-    let rustc = env::var_os(rustc).unwrap();
-    let libdir = env::var_os(libdir).unwrap();
-    let mut dylib_path = bootstrap::dylib_path();
-    dylib_path.insert(0, PathBuf::from(libdir));
-
-    let mut cmd = Command::new(rustc);
-    cmd.args(&args)
-       .arg("--cfg").arg(format!("stage{}", stage))
-       .env(bootstrap::dylib_path_var(), env::join_paths(&dylib_path).unwrap());
-
-    if let Some(target) = target {
-        // The stage0 compiler has a special sysroot distinct from what we
-        // actually downloaded, so we just always pass the `--sysroot` option.
-        cmd.arg("--sysroot").arg(env::var_os("RUSTC_SYSROOT").unwrap());
-
-        // When we build Rust dylibs they're all intended for intermediate
-        // usage, so make sure we pass the -Cprefer-dynamic flag instead of
-        // linking all deps statically into the dylib.
-        cmd.arg("-Cprefer-dynamic");
-
-        // Help the libc crate compile by assisting it in finding the MUSL
-        // native libraries.
-        if let Some(s) = env::var_os("MUSL_ROOT") {
-            let mut root = OsString::from("native=");
-            root.push(&s);
-            root.push("/lib");
-            cmd.arg("-L").arg(&root);
-        }
-
-        // Pass down extra flags, commonly used to configure `-Clinker` when
-        // cross compiling.
-        if let Ok(s) = env::var("RUSTC_FLAGS") {
-            cmd.args(&s.split(" ").filter(|s| !s.is_empty()).collect::<Vec<_>>());
-        }
-
-        // If we're compiling specifically the `panic_abort` crate then we pass
-        // the `-C panic=abort` option. Note that we do not do this for any
-        // other crate intentionally as this is the only crate for now that we
-        // ship with panic=abort.
-        //
-        // This... is a bit of a hack how we detect this. Ideally this
-        // information should be encoded in the crate I guess? Would likely
-        // require an RFC amendment to RFC 1513, however.
-        let is_panic_abort = args.windows(2).any(|a| {
-            &*a[0] == "--crate-name" && &*a[1] == "panic_abort"
-        });
-        // FIXME(stage0): remove this `stage != "0"` condition
-        if is_panic_abort && stage != "0" {
-            cmd.arg("-C").arg("panic=abort");
-        }
-
-        // Set various options from config.toml to configure how we're building
-        // code.
-        if env::var("RUSTC_DEBUGINFO") == Ok("true".to_string()) {
-            cmd.arg("-g");
-        }
-        let debug_assertions = match env::var("RUSTC_DEBUG_ASSERTIONS") {
-            Ok(s) => if s == "true" {"y"} else {"n"},
-            Err(..) => "n",
-        };
-        cmd.arg("-C").arg(format!("debug-assertions={}", debug_assertions));
-        if let Ok(s) = env::var("RUSTC_CODEGEN_UNITS") {
-            cmd.arg("-C").arg(format!("codegen-units={}", s));
-        }
-
-        // Dealing with rpath here is a little special, so let's go into some
-        // detail. First off, `-rpath` is a linker option on Unix platforms
-        // which adds to the runtime dynamic loader path when looking for
-        // dynamic libraries. We use this by default on Unix platforms to ensure
-        // that our nightlies behave the same on Windows, that is they work out
-        // of the box. This can be disabled, of course, but basically that's why
-        // we're gated on RUSTC_RPATH here.
-        //
-        // Ok, so the astute might be wondering "why isn't `-C rpath` used
-        // here?" and that is indeed a good question to task. This codegen
-        // option is the compiler's current interface to generating an rpath.
-        // Unfortunately it doesn't quite suffice for us. The flag currently
-        // takes no value as an argument, so the compiler calculates what it
-        // should pass to the linker as `-rpath`. This unfortunately is based on
-        // the **compile time** directory structure which when building with
-        // Cargo will be very different than the runtime directory structure.
-        //
-        // All that's a really long winded way of saying that if we use
-        // `-Crpath` then the executables generated have the wrong rpath of
-        // something like `$ORIGIN/deps` when in fact the way we distribute
-        // rustc requires the rpath to be `$ORIGIN/../lib`.
-        //
-        // So, all in all, to set up the correct rpath we pass the linker
-        // argument manually via `-C link-args=-Wl,-rpath,...`. Plus isn't it
-        // fun to pass a flag to a tool to pass a flag to pass a flag to a tool
-        // to change a flag in a binary?
-        if env::var("RUSTC_RPATH") == Ok("true".to_string()) {
-            let rpath = if target.contains("apple") {
-                Some("-Wl,-rpath,@loader_path/../lib")
-            } else if !target.contains("windows") {
-                Some("-Wl,-rpath,$ORIGIN/../lib")
-            } else {
-                None
-            };
-            if let Some(rpath) = rpath {
-                cmd.arg("-C").arg(format!("link-args={}", rpath));
-            }
-        }
-    }
-
-    // Actually run the compiler!
-    std::process::exit(match cmd.status() {
-        Ok(s) => s.code().unwrap_or(1),
-        Err(e) => panic!("\n\nfailed to run {:?}: {}\n\n", cmd, e),
-    })
-}
diff --git a/src/bootstrap/rustdoc.rs b/src/bootstrap/rustdoc.rs
deleted file mode 100644 (file)
index 88ac26d..0000000
+++ /dev/null
@@ -1,39 +0,0 @@
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Shim which is passed to Cargo as "rustdoc" when running the bootstrap.
-//!
-//! See comments in `src/bootstrap/rustc.rs` for more information.
-
-extern crate bootstrap;
-
-use std::env;
-use std::process::Command;
-use std::path::PathBuf;
-
-fn main() {
-    let args = env::args_os().skip(1).collect::<Vec<_>>();
-    let rustdoc = env::var_os("RUSTDOC_REAL").unwrap();
-    let libdir = env::var_os("RUSTC_LIBDIR").unwrap();
-
-    let mut dylib_path = bootstrap::dylib_path();
-    dylib_path.insert(0, PathBuf::from(libdir));
-
-    let mut cmd = Command::new(rustdoc);
-    cmd.args(&args)
-       .arg("--cfg").arg(format!("stage{}", env::var("RUSTC_STAGE").unwrap()))
-       .arg("--cfg").arg("dox")
-       .env(bootstrap::dylib_path_var(), env::join_paths(&dylib_path).unwrap());
-    std::process::exit(match cmd.status() {
-        Ok(s) => s.code().unwrap_or(1),
-        Err(e) => panic!("\n\nfailed to run {:?}: {}\n\n", cmd, e),
-    })
-}
-
diff --git a/src/bootstrap/sanity.rs b/src/bootstrap/sanity.rs
new file mode 100644 (file)
index 0000000..7c0f09c
--- /dev/null
@@ -0,0 +1,172 @@
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Sanity checking performed by rustbuild before actually executing anything.
+//!
+//! This module contains the implementation of ensuring that the build
+//! environment looks reasonable before progressing. This will verify that
+//! various programs like git and python exist, along with ensuring that all C
+//! compilers for cross-compiling are found.
+//!
+//! In theory if we get past this phase it's a bug if a build fails, but in
+//! practice that's likely not true!
+
+use std::collections::HashSet;
+use std::env;
+use std::ffi::{OsStr, OsString};
+use std::fs;
+use std::process::Command;
+
+use build_helper::output;
+
+use Build;
+
+pub fn check(build: &mut Build) {
+    let mut checked = HashSet::new();
+    let path = env::var_os("PATH").unwrap_or(OsString::new());
+    let mut need_cmd = |cmd: &OsStr| {
+        if !checked.insert(cmd.to_owned()) {
+            return
+        }
+        for path in env::split_paths(&path).map(|p| p.join(cmd)) {
+            if fs::metadata(&path).is_ok() ||
+               fs::metadata(path.with_extension("exe")).is_ok() {
+                return
+            }
+        }
+        panic!("\n\ncouldn't find required command: {:?}\n\n", cmd);
+    };
+
+    // If we've got a git directory we're gonna need git to update
+    // submodules and learn about various other aspects.
+    if fs::metadata(build.src.join(".git")).is_ok() {
+        need_cmd("git".as_ref());
+    }
+
+    // We need cmake, but only if we're actually building LLVM
+    for host in build.config.host.iter() {
+        if let Some(config) = build.config.target_config.get(host) {
+            if config.llvm_config.is_some() {
+                continue
+            }
+        }
+        need_cmd("cmake".as_ref());
+        if build.config.ninja {
+            need_cmd("ninja".as_ref())
+        }
+        break
+    }
+
+    need_cmd("python".as_ref());
+
+    // We're gonna build some custom C code here and there, host triples
+    // also build some C++ shims for LLVM so we need a C++ compiler.
+    for target in build.config.target.iter() {
+        need_cmd(build.cc(target).as_ref());
+        if let Some(ar) = build.ar(target) {
+            need_cmd(ar.as_ref());
+        }
+    }
+    for host in build.config.host.iter() {
+        need_cmd(build.cxx(host).as_ref());
+    }
+
+    // Externally configured LLVM requires FileCheck to exist
+    let filecheck = build.llvm_filecheck(&build.config.build);
+    if !filecheck.starts_with(&build.out) && !filecheck.exists() {
+        panic!("filecheck executable {:?} does not exist", filecheck);
+    }
+
+    for target in build.config.target.iter() {
+        // Either can't build or don't want to run jemalloc on these targets
+        if target.contains("rumprun") ||
+           target.contains("bitrig") ||
+           target.contains("openbsd") ||
+           target.contains("msvc") {
+            build.config.use_jemalloc = false;
+        }
+
+        // Can't compile for iOS unless we're on OSX
+        if target.contains("apple-ios") &&
+           !build.config.build.contains("apple-darwin") {
+            panic!("the iOS target is only supported on OSX");
+        }
+
+        // Make sure musl-root is valid if specified
+        if target.contains("musl") && (target.contains("x86_64") || target.contains("i686")) {
+            match build.config.musl_root {
+                Some(ref root) => {
+                    if fs::metadata(root.join("lib/libc.a")).is_err() {
+                        panic!("couldn't find libc.a in musl dir: {}",
+                               root.join("lib").display());
+                    }
+                    if fs::metadata(root.join("lib/libunwind.a")).is_err() {
+                        panic!("couldn't find libunwind.a in musl dir: {}",
+                               root.join("lib").display());
+                    }
+                }
+                None => {
+                    panic!("when targeting MUSL the build.musl-root option \
+                            must be specified in config.toml")
+                }
+            }
+        }
+
+        if target.contains("msvc") {
+            // There are three builds of cmake on windows: MSVC, MinGW, and
+            // Cygwin. The Cygwin build does not have generators for Visual
+            // Studio, so detect that here and error.
+            let out = output(Command::new("cmake").arg("--help"));
+            if !out.contains("Visual Studio") {
+                panic!("
+cmake does not support Visual Studio generators.
+
+This is likely due to it being an msys/cygwin build of cmake,
+rather than the required windows version, built using MinGW
+or Visual Studio.
+
+If you are building under msys2 try installing the mingw-w64-x86_64-cmake
+package instead of cmake:
+
+$ pacman -R cmake && pacman -S mingw-w64-x86_64-cmake
+");
+            }
+        }
+
+        if target.contains("arm-linux-android") {
+            need_cmd("adb".as_ref());
+        }
+    }
+
+    for host in build.flags.host.iter() {
+        if !build.config.host.contains(host) {
+            panic!("specified host `{}` is not in the ./configure list", host);
+        }
+    }
+    for target in build.flags.target.iter() {
+        if !build.config.target.contains(target) {
+            panic!("specified target `{}` is not in the ./configure list",
+                   target);
+        }
+    }
+
+    let run = |cmd: &mut Command| {
+        cmd.output().map(|output| {
+            String::from_utf8_lossy(&output.stdout)
+                   .lines().next().unwrap()
+                   .to_string()
+        })
+    };
+    build.gdb_version = run(Command::new("gdb").arg("--version")).ok();
+    build.lldb_version = run(Command::new("lldb").arg("--version")).ok();
+    if build.lldb_version.is_some() {
+        build.lldb_python_dir = run(Command::new("lldb").arg("-P")).ok();
+    }
+}
diff --git a/src/bootstrap/step.rs b/src/bootstrap/step.rs
new file mode 100644 (file)
index 0000000..4b3be04
--- /dev/null
@@ -0,0 +1,590 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Major workhorse of rustbuild, definition and dependencies between stages of
+//! the compile.
+//!
+//! The primary purpose of this module is to define the various `Step`s of
+//! execution of the build. Each `Step` has a corresponding `Source` indicating
+//! what it's actually doing along with a number of dependencies which must be
+//! executed first.
+//!
+//! This module will take the CLI as input and calculate the steps required for
+//! the build requested, ensuring that all intermediate pieces are in place.
+//! Essentially this module is a `make`-replacement, but not as good.
+
+use std::collections::HashSet;
+
+use {Build, Compiler};
+
+#[derive(Hash, Eq, PartialEq, Clone, Debug)]
+pub struct Step<'a> {
+    pub src: Source<'a>,
+    pub target: &'a str,
+}
+
+/// Macro used to iterate over all targets that are recognized by the build
+/// system.
+///
+/// Whenever a new step is added it will involve adding an entry here, updating
+/// the dependencies section below, and then adding an implementation of the
+/// step in `build/mod.rs`.
+///
+/// This macro takes another macro as an argument and then calls that macro with
+/// all steps that the build system knows about.
+macro_rules! targets {
+    ($m:ident) => {
+        $m! {
+            // Step representing building the stageN compiler. This is just the
+            // compiler executable itself, not any of the support libraries
+            (rustc, Rustc { stage: u32 }),
+
+            // Steps for the two main cargo builds. These are parameterized over
+            // the compiler which is producing the artifact.
+            (libstd, Libstd { compiler: Compiler<'a> }),
+            (libtest, Libtest { compiler: Compiler<'a> }),
+            (librustc, Librustc { compiler: Compiler<'a> }),
+
+            // Links the target produced by the compiler provided into the
+            // host's directory also provided.
+            (libstd_link, LibstdLink {
+                compiler: Compiler<'a>,
+                host: &'a str
+            }),
+            (libtest_link, LibtestLink {
+                compiler: Compiler<'a>,
+                host: &'a str
+            }),
+            (librustc_link, LibrustcLink {
+                compiler: Compiler<'a>,
+                host: &'a str
+            }),
+
+            // Various tools that we can build as part of the build.
+            (tool_linkchecker, ToolLinkchecker { stage: u32 }),
+            (tool_rustbook, ToolRustbook { stage: u32 }),
+            (tool_error_index, ToolErrorIndex { stage: u32 }),
+            (tool_cargotest, ToolCargoTest { stage: u32 }),
+            (tool_tidy, ToolTidy { stage: u32 }),
+            (tool_compiletest, ToolCompiletest { stage: u32 }),
+
+            // Steps for long-running native builds. Ideally these wouldn't
+            // actually exist and would be part of build scripts, but for now
+            // these are here.
+            //
+            // There aren't really any parameters to this, but empty structs
+            // with braces are unstable so we just pick something that works.
+            (llvm, Llvm { _dummy: () }),
+            (compiler_rt, CompilerRt { _dummy: () }),
+            (test_helpers, TestHelpers { _dummy: () }),
+            (debugger_scripts, DebuggerScripts { stage: u32 }),
+
+            // Steps for various pieces of documentation that we can generate,
+            // the 'doc' step is just a pseudo target to depend on a bunch of
+            // others.
+            (doc, Doc { stage: u32 }),
+            (doc_book, DocBook { stage: u32 }),
+            (doc_nomicon, DocNomicon { stage: u32 }),
+            (doc_style, DocStyle { stage: u32 }),
+            (doc_standalone, DocStandalone { stage: u32 }),
+            (doc_std, DocStd { stage: u32 }),
+            (doc_test, DocTest { stage: u32 }),
+            (doc_rustc, DocRustc { stage: u32 }),
+            (doc_error_index, DocErrorIndex { stage: u32 }),
+
+            // Steps for running tests. The 'check' target is just a pseudo
+            // target to depend on a bunch of others.
+            (check, Check { stage: u32, compiler: Compiler<'a> }),
+            (check_target, CheckTarget { stage: u32, compiler: Compiler<'a> }),
+            (check_linkcheck, CheckLinkcheck { stage: u32 }),
+            (check_cargotest, CheckCargoTest { stage: u32 }),
+            (check_tidy, CheckTidy { stage: u32 }),
+            (check_rpass, CheckRPass { compiler: Compiler<'a> }),
+            (check_rpass_full, CheckRPassFull { compiler: Compiler<'a> }),
+            (check_rpass_valgrind, CheckRPassValgrind { compiler: Compiler<'a> }),
+            (check_rfail, CheckRFail { compiler: Compiler<'a> }),
+            (check_rfail_full, CheckRFailFull { compiler: Compiler<'a> }),
+            (check_cfail, CheckCFail { compiler: Compiler<'a> }),
+            (check_cfail_full, CheckCFailFull { compiler: Compiler<'a> }),
+            (check_pfail, CheckPFail { compiler: Compiler<'a> }),
+            (check_pretty, CheckPretty { compiler: Compiler<'a> }),
+            (check_pretty_rpass, CheckPrettyRPass { compiler: Compiler<'a> }),
+            (check_pretty_rpass_full, CheckPrettyRPassFull { compiler: Compiler<'a> }),
+            (check_pretty_rfail, CheckPrettyRFail { compiler: Compiler<'a> }),
+            (check_pretty_rfail_full, CheckPrettyRFailFull { compiler: Compiler<'a> }),
+            (check_pretty_rpass_valgrind, CheckPrettyRPassValgrind { compiler: Compiler<'a> }),
+            (check_codegen, CheckCodegen { compiler: Compiler<'a> }),
+            (check_codegen_units, CheckCodegenUnits { compiler: Compiler<'a> }),
+            (check_incremental, CheckIncremental { compiler: Compiler<'a> }),
+            (check_ui, CheckUi { compiler: Compiler<'a> }),
+            (check_debuginfo, CheckDebuginfo { compiler: Compiler<'a> }),
+            (check_rustdoc, CheckRustdoc { compiler: Compiler<'a> }),
+            (check_docs, CheckDocs { compiler: Compiler<'a> }),
+            (check_error_index, CheckErrorIndex { compiler: Compiler<'a> }),
+            (check_rmake, CheckRMake { compiler: Compiler<'a> }),
+            (check_crate_std, CheckCrateStd { compiler: Compiler<'a> }),
+            (check_crate_test, CheckCrateTest { compiler: Compiler<'a> }),
+            (check_crate_rustc, CheckCrateRustc { compiler: Compiler<'a> }),
+
+            // Distribution targets, creating tarballs
+            (dist, Dist { stage: u32 }),
+            (dist_docs, DistDocs { stage: u32 }),
+            (dist_mingw, DistMingw { _dummy: () }),
+            (dist_rustc, DistRustc { stage: u32 }),
+            (dist_std, DistStd { compiler: Compiler<'a> }),
+
+            // Misc targets
+            (android_copy_libs, AndroidCopyLibs { compiler: Compiler<'a> }),
+        }
+    }
+}
+
+// Define the `Source` enum by iterating over all the steps and peeling out just
+// the types that we want to define.
+
+macro_rules! item { ($a:item) => ($a) }
+
+macro_rules! define_source {
+    ($(($short:ident, $name:ident { $($args:tt)* }),)*) => {
+        item! {
+            #[derive(Hash, Eq, PartialEq, Clone, Debug)]
+            pub enum Source<'a> {
+                $($name { $($args)* }),*
+            }
+        }
+    }
+}
+
+targets!(define_source);
+
+/// Calculate a list of all steps described by `build`.
+///
+/// This will inspect the flags passed in on the command line and use that to
+/// build up a list of steps to execute. These steps will then be transformed
+/// into a topologically sorted list which when executed left-to-right will
+/// correctly sequence the entire build.
+pub fn all(build: &Build) -> Vec<Step> {
+    let mut ret = Vec::new();
+    let mut all = HashSet::new();
+    for target in top_level(build) {
+        fill(build, &target, &mut ret, &mut all);
+    }
+    return ret;
+
+    fn fill<'a>(build: &'a Build,
+                target: &Step<'a>,
+                ret: &mut Vec<Step<'a>>,
+                set: &mut HashSet<Step<'a>>) {
+        if set.insert(target.clone()) {
+            for dep in target.deps(build) {
+                fill(build, &dep, ret, set);
+            }
+            ret.push(target.clone());
+        }
+    }
+}
+
+/// Determines what top-level targets are requested as part of this build,
+/// returning them as a list.
+fn top_level(build: &Build) -> Vec<Step> {
+    let mut targets = Vec::new();
+    let stage = build.flags.stage.unwrap_or(2);
+
+    let host = Step {
+        src: Source::Llvm { _dummy: () },
+        target: build.flags.host.iter().next()
+                     .unwrap_or(&build.config.build),
+    };
+    let target = Step {
+        src: Source::Llvm { _dummy: () },
+        target: build.flags.target.iter().next().map(|x| &x[..])
+                     .unwrap_or(host.target)
+    };
+
+    // First, try to find steps on the command line.
+    add_steps(build, stage, &host, &target, &mut targets);
+
+    // If none are specified, then build everything.
+    if targets.len() == 0 {
+        let t = Step {
+            src: Source::Llvm { _dummy: () },
+            target: &build.config.build,
+        };
+        if build.config.docs {
+          targets.push(t.doc(stage));
+        }
+        for host in build.config.host.iter() {
+            if !build.flags.host.contains(host) {
+                continue
+            }
+            let host = t.target(host);
+            if host.target == build.config.build {
+                targets.push(host.librustc(host.compiler(stage)));
+            } else {
+                targets.push(host.librustc_link(t.compiler(stage), host.target));
+            }
+            for target in build.config.target.iter() {
+                if !build.flags.target.contains(target) {
+                    continue
+                }
+
+                if host.target == build.config.build {
+                    targets.push(host.target(target)
+                                     .libtest(host.compiler(stage)));
+                } else {
+                    targets.push(host.target(target)
+                                     .libtest_link(t.compiler(stage), host.target));
+                }
+            }
+        }
+    }
+
+    return targets
+
+}
+
+fn add_steps<'a>(build: &'a Build,
+                 stage: u32,
+                 host: &Step<'a>,
+                 target: &Step<'a>,
+                 targets: &mut Vec<Step<'a>>) {
+    struct Context<'a> {
+        stage: u32,
+        compiler: Compiler<'a>,
+        _dummy: (),
+        host: &'a str,
+    }
+    for step in build.flags.step.iter() {
+
+        // The macro below insists on hygienic access to all local variables, so
+        // we shove them all in a struct and subvert hygiene by accessing struct
+        // fields instead.
+        let cx = Context {
+            stage: stage,
+            compiler: host.target(&build.config.build).compiler(stage),
+            _dummy: (),
+            host: host.target,
+        };
+        macro_rules! add_step {
+            ($(($short:ident, $name:ident { $($arg:ident: $t:ty),* }),)*) => ({$(
+                let name = stringify!($short).replace("_", "-");
+                if &step[..] == &name[..] {
+                    targets.push(target.$short($(cx.$arg),*));
+                    continue
+                }
+                drop(name);
+            )*})
+        }
+
+        targets!(add_step);
+
+        panic!("unknown step: {}", step);
+    }
+}
+
+macro_rules! constructors {
+    ($(($short:ident, $name:ident { $($arg:ident: $t:ty),* }),)*) => {$(
+        fn $short(&self, $($arg: $t),*) -> Step<'a> {
+            Step {
+                src: Source::$name { $($arg: $arg),* },
+                target: self.target,
+            }
+        }
+    )*}
+}
+
+impl<'a> Step<'a> {
+    fn compiler(&self, stage: u32) -> Compiler<'a> {
+        Compiler::new(stage, self.target)
+    }
+
+    fn target(&self, target: &'a str) -> Step<'a> {
+        Step { target: target, src: self.src.clone() }
+    }
+
+    // Define ergonomic constructors for each step defined above so they can be
+    // easily constructed.
+    targets!(constructors);
+
+    /// Mapping of all dependencies for rustbuild.
+    ///
+    /// This function receives a step, the build that we're building for, and
+    /// then returns a list of all the dependencies of that step.
+    pub fn deps(&self, build: &'a Build) -> Vec<Step<'a>> {
+        match self.src {
+            Source::Rustc { stage: 0 } => {
+                Vec::new()
+            }
+            Source::Rustc { stage } => {
+                let compiler = Compiler::new(stage - 1, &build.config.build);
+                vec![self.librustc(compiler)]
+            }
+            Source::Librustc { compiler } => {
+                vec![self.libtest(compiler), self.llvm(())]
+            }
+            Source::Libtest { compiler } => {
+                vec![self.libstd(compiler)]
+            }
+            Source::Libstd { compiler } => {
+                vec![self.compiler_rt(()),
+                     self.rustc(compiler.stage).target(compiler.host)]
+            }
+            Source::LibrustcLink { compiler, host } => {
+                vec![self.librustc(compiler),
+                     self.libtest_link(compiler, host)]
+            }
+            Source::LibtestLink { compiler, host } => {
+                vec![self.libtest(compiler), self.libstd_link(compiler, host)]
+            }
+            Source::LibstdLink { compiler, host } => {
+                vec![self.libstd(compiler),
+                     self.target(host).rustc(compiler.stage)]
+            }
+            Source::CompilerRt { _dummy } => {
+                vec![self.llvm(()).target(&build.config.build)]
+            }
+            Source::Llvm { _dummy } => Vec::new(),
+            Source::TestHelpers { _dummy } => Vec::new(),
+            Source::DebuggerScripts { stage: _ } => Vec::new(),
+
+            // Note that all doc targets depend on artifacts from the build
+            // architecture, not the target (which is where we're generating
+            // docs into).
+            Source::DocStd { stage } => {
+                let compiler = self.target(&build.config.build).compiler(stage);
+                vec![self.libstd(compiler)]
+            }
+            Source::DocTest { stage } => {
+                let compiler = self.target(&build.config.build).compiler(stage);
+                vec![self.libtest(compiler)]
+            }
+            Source::DocBook { stage } |
+            Source::DocNomicon { stage } |
+            Source::DocStyle { stage } => {
+                vec![self.target(&build.config.build).tool_rustbook(stage)]
+            }
+            Source::DocErrorIndex { stage } => {
+                vec![self.target(&build.config.build).tool_error_index(stage)]
+            }
+            Source::DocStandalone { stage } => {
+                vec![self.target(&build.config.build).rustc(stage)]
+            }
+            Source::DocRustc { stage } => {
+                vec![self.doc_test(stage)]
+            }
+            Source::Doc { stage } => {
+                vec![self.doc_book(stage), self.doc_nomicon(stage),
+                     self.doc_style(stage), self.doc_standalone(stage),
+                     self.doc_std(stage),
+                     self.doc_error_index(stage)]
+            }
+            Source::Check { stage, compiler } => {
+                // Check is just a pseudo step which means check all targets,
+                // so just depend on checking all targets.
+                build.config.target.iter().map(|t| {
+                    self.target(t).check_target(stage, compiler)
+                }).collect()
+            }
+            Source::CheckTarget { stage, compiler } => {
+                // CheckTarget here means run all possible test suites for this
+                // target. Most of the time, however, we can't actually run
+                // anything if we're not the build triple as we could be cross
+                // compiling.
+                //
+                // As a result, the base set of targets here is quite stripped
+                // down from the standard set of targets. These suites have
+                // their own internal logic to run in cross-compiled situations
+                // if they'll run at all. For example compiletest knows that
+                // when testing Android targets we ship artifacts to the
+                // emulator.
+                //
+                // When in doubt the rule of thumb for adding to this list is
+                // "should this test suite run on the android bot?"
+                let mut base = vec![
+                    self.check_rpass(compiler),
+                    self.check_rfail(compiler),
+                    self.check_crate_std(compiler),
+                    self.check_crate_test(compiler),
+                    self.check_debuginfo(compiler),
+                    self.dist(stage),
+                ];
+
+                // If we're testing the build triple, then we know we can
+                // actually run binaries and such, so we run all possible tests
+                // that we know about.
+                if self.target == build.config.build {
+                    base.extend(vec![
+                        // docs-related
+                        self.check_docs(compiler),
+                        self.check_error_index(compiler),
+                        self.check_rustdoc(compiler),
+
+                        // UI-related
+                        self.check_cfail(compiler),
+                        self.check_pfail(compiler),
+                        self.check_ui(compiler),
+
+                        // codegen-related
+                        self.check_incremental(compiler),
+                        self.check_codegen(compiler),
+                        self.check_codegen_units(compiler),
+
+                        // misc compiletest-test suites
+                        self.check_rpass_full(compiler),
+                        self.check_rfail_full(compiler),
+                        self.check_cfail_full(compiler),
+                        self.check_pretty_rpass_full(compiler),
+                        self.check_pretty_rfail_full(compiler),
+                        self.check_rpass_valgrind(compiler),
+                        self.check_rmake(compiler),
+
+                        // crates
+                        self.check_crate_rustc(compiler),
+
+                        // pretty
+                        self.check_pretty(compiler),
+                        self.check_pretty_rpass(compiler),
+                        self.check_pretty_rfail(compiler),
+                        self.check_pretty_rpass_valgrind(compiler),
+
+                        // misc
+                        self.check_linkcheck(stage),
+                        self.check_tidy(stage),
+                    ]);
+                }
+                return base
+            }
+            Source::CheckLinkcheck { stage } => {
+                vec![self.tool_linkchecker(stage), self.doc(stage)]
+            }
+            Source::CheckCargoTest { stage } => {
+                vec![self.tool_cargotest(stage),
+                     self.librustc(self.compiler(stage))]
+            }
+            Source::CheckTidy { stage } => {
+                vec![self.tool_tidy(stage)]
+            }
+            Source::CheckPrettyRPass { compiler } |
+            Source::CheckPrettyRFail { compiler } |
+            Source::CheckRFail { compiler } |
+            Source::CheckPFail { compiler } |
+            Source::CheckCodegen { compiler } |
+            Source::CheckCodegenUnits { compiler } |
+            Source::CheckIncremental { compiler } |
+            Source::CheckUi { compiler } |
+            Source::CheckRustdoc { compiler } |
+            Source::CheckPretty { compiler } |
+            Source::CheckCFail { compiler } |
+            Source::CheckRPassValgrind { compiler } |
+            Source::CheckRPass { compiler } => {
+                let mut base = vec![
+                    self.libtest(compiler),
+                    self.target(compiler.host).tool_compiletest(compiler.stage),
+                    self.test_helpers(()),
+                ];
+                if self.target.contains("android") {
+                    base.push(self.android_copy_libs(compiler));
+                }
+                base
+            }
+            Source::CheckDebuginfo { compiler } => {
+                vec![
+                    self.libtest(compiler),
+                    self.target(compiler.host).tool_compiletest(compiler.stage),
+                    self.test_helpers(()),
+                    self.debugger_scripts(compiler.stage),
+                ]
+            }
+            Source::CheckRPassFull { compiler } |
+            Source::CheckRFailFull { compiler } |
+            Source::CheckCFailFull { compiler } |
+            Source::CheckPrettyRPassFull { compiler } |
+            Source::CheckPrettyRFailFull { compiler } |
+            Source::CheckPrettyRPassValgrind { compiler } |
+            Source::CheckRMake { compiler } => {
+                vec![self.librustc(compiler),
+                     self.target(compiler.host).tool_compiletest(compiler.stage)]
+            }
+            Source::CheckDocs { compiler } => {
+                vec![self.libstd(compiler)]
+            }
+            Source::CheckErrorIndex { compiler } => {
+                vec![self.libstd(compiler),
+                     self.target(compiler.host).tool_error_index(compiler.stage)]
+            }
+            Source::CheckCrateStd { compiler } => {
+                vec![self.libtest(compiler)]
+            }
+            Source::CheckCrateTest { compiler } => {
+                vec![self.libtest(compiler)]
+            }
+            Source::CheckCrateRustc { compiler } => {
+                vec![self.libtest(compiler)]
+            }
+
+            Source::ToolLinkchecker { stage } |
+            Source::ToolTidy { stage } => {
+                vec![self.libstd(self.compiler(stage))]
+            }
+            Source::ToolErrorIndex { stage } |
+            Source::ToolRustbook { stage } => {
+                vec![self.librustc(self.compiler(stage))]
+            }
+            Source::ToolCargoTest { stage } => {
+                vec![self.libstd(self.compiler(stage))]
+            }
+            Source::ToolCompiletest { stage } => {
+                vec![self.libtest(self.compiler(stage))]
+            }
+
+            Source::DistDocs { stage } => vec![self.doc(stage)],
+            Source::DistMingw { _dummy: _ } => Vec::new(),
+            Source::DistRustc { stage } => {
+                vec![self.rustc(stage)]
+            }
+            Source::DistStd { compiler } => {
+                // We want to package up as many target libraries as possible
+                // for the `rust-std` package, so if this is a host target we
+                // depend on librustc and otherwise we just depend on libtest.
+                if build.config.host.iter().any(|t| t == self.target) {
+                    vec![self.librustc(compiler)]
+                } else {
+                    vec![self.libtest(compiler)]
+                }
+            }
+
+            Source::Dist { stage } => {
+                let mut base = Vec::new();
+
+                for host in build.config.host.iter() {
+                    let host = self.target(host);
+                    base.push(host.dist_rustc(stage));
+                    if host.target.contains("windows-gnu") {
+                        base.push(host.dist_mingw(()));
+                    }
+
+                    let compiler = self.compiler(stage);
+                    for target in build.config.target.iter() {
+                        let target = self.target(target);
+                        if build.config.docs {
+                            base.push(target.dist_docs(stage));
+                        }
+                        base.push(target.dist_std(compiler));
+                    }
+                }
+                return base
+            }
+
+            Source::AndroidCopyLibs { compiler } => {
+                vec![self.libtest(compiler)]
+            }
+        }
+    }
+}
diff --git a/src/bootstrap/util.rs b/src/bootstrap/util.rs
new file mode 100644 (file)
index 0000000..3ef7f8c
--- /dev/null
@@ -0,0 +1,142 @@
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Various utility functions used throughout rustbuild.
+//!
+//! Simple things like testing the various filesystem operations here and there,
+//! not a lot of interesting happenings here unfortunately.
+
+use std::env;
+use std::ffi::OsString;
+use std::fs;
+use std::path::{Path, PathBuf};
+use std::process::Command;
+
+use filetime::FileTime;
+
+/// Returns the `name` as the filename of a static library for `target`.
+pub fn staticlib(name: &str, target: &str) -> String {
+    if target.contains("windows-msvc") {
+        format!("{}.lib", name)
+    } else {
+        format!("lib{}.a", name)
+    }
+}
+
+/// Returns the last-modified time for `path`, or zero if it doesn't exist.
+pub fn mtime(path: &Path) -> FileTime {
+    fs::metadata(path).map(|f| {
+        FileTime::from_last_modification_time(&f)
+    }).unwrap_or(FileTime::zero())
+}
+
+/// Copies a file from `src` to `dst`, attempting to use hard links and then
+/// falling back to an actually filesystem copy if necessary.
+pub fn copy(src: &Path, dst: &Path) {
+    let res = fs::hard_link(src, dst);
+    let res = res.or_else(|_| fs::copy(src, dst).map(|_| ()));
+    if let Err(e) = res {
+        panic!("failed to copy `{}` to `{}`: {}", src.display(),
+               dst.display(), e)
+    }
+}
+
+/// Copies the `src` directory recursively to `dst`. Both are assumed to exist
+/// when this function is called.
+pub fn cp_r(src: &Path, dst: &Path) {
+    for f in t!(fs::read_dir(src)) {
+        let f = t!(f);
+        let path = f.path();
+        let name = path.file_name().unwrap();
+        let dst = dst.join(name);
+        if t!(f.file_type()).is_dir() {
+            let _ = fs::remove_dir_all(&dst);
+            t!(fs::create_dir(&dst));
+            cp_r(&path, &dst);
+        } else {
+            let _ = fs::remove_file(&dst);
+            copy(&path, &dst);
+        }
+    }
+}
+
+/// Given an executable called `name`, return the filename for the
+/// executable for a particular target.
+pub fn exe(name: &str, target: &str) -> String {
+    if target.contains("windows") {
+        format!("{}.exe", name)
+    } else {
+        name.to_string()
+    }
+}
+
+/// Returns whether the file name given looks like a dynamic library.
+pub fn is_dylib(name: &str) -> bool {
+    name.ends_with(".dylib") || name.ends_with(".so") || name.ends_with(".dll")
+}
+
+/// Returns the corresponding relative library directory that the compiler's
+/// dylibs will be found in.
+pub fn libdir(target: &str) -> &'static str {
+    if target.contains("windows") {"bin"} else {"lib"}
+}
+
+/// Adds a list of lookup paths to `cmd`'s dynamic library lookup path.
+pub fn add_lib_path(path: Vec<PathBuf>, cmd: &mut Command) {
+    let mut list = dylib_path();
+    for path in path {
+        list.insert(0, path);
+    }
+    cmd.env(dylib_path_var(), t!(env::join_paths(list)));
+}
+
+/// Returns whether `dst` is up to date given that the file or files in `src`
+/// are used to generate it.
+///
+/// Uses last-modified time checks to verify this.
+pub fn up_to_date(src: &Path, dst: &Path) -> bool {
+    let threshold = mtime(dst);
+    let meta = t!(fs::metadata(src));
+    if meta.is_dir() {
+        dir_up_to_date(src, &threshold)
+    } else {
+        FileTime::from_last_modification_time(&meta) <= threshold
+    }
+}
+
+fn dir_up_to_date(src: &Path, threshold: &FileTime) -> bool {
+    t!(fs::read_dir(src)).map(|e| t!(e)).all(|e| {
+        let meta = t!(e.metadata());
+        if meta.is_dir() {
+            dir_up_to_date(&e.path(), threshold)
+        } else {
+            FileTime::from_last_modification_time(&meta) < *threshold
+        }
+    })
+}
+
+/// Returns the environment variable which the dynamic library lookup path
+/// resides in for this platform.
+pub fn dylib_path_var() -> &'static str {
+    if cfg!(target_os = "windows") {
+        "PATH"
+    } else if cfg!(target_os = "macos") {
+        "DYLD_LIBRARY_PATH"
+    } else {
+        "LD_LIBRARY_PATH"
+    }
+}
+
+/// Parses the `dylib_path_var()` environment variable, returning a list of
+/// paths that are members of this lookup path.
+pub fn dylib_path() -> Vec<PathBuf> {
+    env::split_paths(&env::var_os(dylib_path_var()).unwrap_or(OsString::new()))
+        .collect()
+}
index a6b4e9492181c790fe5a3c040ca6e02397824d8f..e8c88b7db0699d1ebe03b1827c29c4607aac3dd2 100644 (file)
@@ -339,7 +339,7 @@ fn call_with_ref<'a, F>(some_closure:F) -> i32
     where F: Fn(&'a 32) -> i32 {
 ```
 
-However this presents a problem with in our case. When you specify the explict
+However this presents a problem with in our case. When you specify the explicit
 lifetime on a function it binds that lifetime to the *entire* scope of the function
 instead of just the invocation scope of our closure. This means that the borrow checker
 will see a mutable reference in the same lifetime as our immutable reference and fail
@@ -354,7 +354,7 @@ fn call_with_ref<F>(some_closure:F) -> i32
 ```
 
 This lets the Rust compiler find the minimum lifetime to invoke our closure and
-satisfy the borrow checker's rules. Our function then compiles and excutes as we
+satisfy the borrow checker's rules. Our function then compiles and executes as we
 expect.
 
 ```rust
index a6ff75db89b88ddccbefe9f1b315df47d9e14240..78ab3c18e4561988429bdb55ffbec9da21eb8caf 100644 (file)
@@ -41,8 +41,9 @@ they get set in the [`[features]` section][features] of your `Cargo.toml`:
 # no features by default
 default = []
 
-# The “secure-password” feature depends on the bcrypt package.
-secure-password = ["bcrypt"]
+# Add feature "foo" here, then you can use it. 
+# Our "foo" feature depends on nothing else.
+foo = []
 ```
 
 When you do this, Cargo passes along a flag to `rustc`:
index 3c6643fbfe1554e0ae02c5bc551f0a04353715c2..6292ba9aac40317c41e590bbc58d549e2f6df179 100644 (file)
@@ -486,6 +486,17 @@ you have a module in `foo.rs`, you'll often open its code and see this:
 //! The `foo` module contains a lot of useful functionality blah blah blah
 ```
 
+### Crate documentation
+
+Crates can be documented by placing an inner doc comment (`//!`) at the
+beginning of the crate root, aka `lib.rs`:
+
+```rust
+//! This is documentation for the `foo` crate.
+//!
+//! The foo crate is meant to be used for bar.
+```
+
 ### Documentation comment style
 
 Check out [RFC 505][rfc505] for full conventions around the style and format of
index e7d05a8d93a561eccb11f340ced8c8caac0b43af..700ab2be589326f5b30521f66cbc94705ae5c341 100644 (file)
@@ -11,7 +11,7 @@ an Internet connection to run the commands in this section, as we’ll be
 downloading Rust from the Internet.
 
 We’ll be showing off a number of commands using a terminal, and those lines all
-start with `$`. We don't need to type in the `$`s, they are there to indicate
+start with `$`. You don't need to type in the `$`s, they are there to indicate
 the start of each command. We’ll see many tutorials and examples around the web
 that follow this convention: `$` for commands run as our regular user, and `#`
 for commands we should be running as an administrator.
@@ -159,9 +159,11 @@ You should see the version number, commit hash, and commit date.
 If you do, Rust has been installed successfully! Congrats!
 
 If you don't and you're on Windows, check that Rust is in your %PATH% system
-variable. If it isn't, run the installer again, select "Change" on the "Change,
-repair, or remove installation" page and ensure "Add to PATH" is installed on
-the local hard drive.
+variable: `$ echo %PATH%`. If it isn't, run the installer again, select "Change"
+on the "Change, repair, or remove installation" page and ensure "Add to PATH" is
+installed on the local hard drive.  If you need to configure your path manually,
+you can find the Rust executables in a directory like
+`"C:\Program Files\Rust stable GNU 1.x\bin"`.
 
 Rust does not do its own linking, and so you’ll need to have a linker
 installed. Doing so will depend on your specific system, consult its
@@ -339,7 +341,8 @@ On Windows, you'd enter:
 
 ```bash
 $ dir
-main.exe  main.rs
+main.exe
+main.rs
 ```
 
 This shows we have two files: the source code, with an `.rs` extension, and the
@@ -347,7 +350,7 @@ executable (`main.exe` on Windows, `main` everywhere else). All that's left to
 do from here is run the `main` or `main.exe` file, like this:
 
 ```bash
-$ ./main  # or main.exe on Windows
+$ ./main  # or .\main.exe on Windows
 ```
 
 If *main.rs* were your "Hello, world!" program, this would print `Hello,
index c759ff9bdbde48e845a64dfbc158f198bd8178ff..6ce75efd1031d83ce7d372081090542a6091bf9b 100644 (file)
@@ -370,7 +370,7 @@ We could also use a range of versions.
 [Cargo’s documentation][cargodoc] contains more details.
 
 [semver]: http://semver.org
-[cargodoc]: http://doc.crates.io/crates-io.html
+[cargodoc]: http://doc.crates.io/specifying-dependencies.html
 
 Now, without changing any of our code, let’s build our project:
 
index 2c2d89a1fbf9ed3b46c881a19c0108fde7fd2217..a8340d9d31e79d2fe319794ef49f90da988b7721 100644 (file)
@@ -57,7 +57,7 @@ but you must add the right number of `:` if you skip them:
 asm!("xor %eax, %eax"
     :
     :
-    : "{eax}"
+    : "eax"
    );
 # } }
 ```
@@ -68,7 +68,7 @@ Whitespace also doesn't matter:
 # #![feature(asm)]
 # #[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
 # fn main() { unsafe {
-asm!("xor %eax, %eax" ::: "{eax}");
+asm!("xor %eax, %eax" ::: "eax");
 # } }
 ```
 
@@ -127,7 +127,7 @@ stay valid.
 # #[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
 # fn main() { unsafe {
 // Put the value 0x200 in eax
-asm!("mov $$0x200, %eax" : /* no outputs */ : /* no inputs */ : "{eax}");
+asm!("mov $$0x200, %eax" : /* no outputs */ : /* no inputs */ : "eax");
 # } }
 ```
 
index e23e6f3a786a5c0e816edb6f27370f3aaf20caf7..e681d1bee06184549861f1045c1b765a1c2a0f7e 100644 (file)
@@ -105,7 +105,7 @@ When you need to keep track of how many times you already looped, you can use th
 #### On ranges:
 
 ```rust
-for (i,j) in (5..10).enumerate() {
+for (i, j) in (5..10).enumerate() {
     println!("i = {} and j = {}", i, j);
 }
 ```
index e46271511462449930ee4859ae7bbd3505d34a36..a0a49d55e105740572194aedfd870f25534b1a50 100644 (file)
@@ -62,8 +62,8 @@ Note that here, the `x` is mutable, but not the `y`.
 # Interior vs. Exterior Mutability
 
 However, when we say something is ‘immutable’ in Rust, that doesn’t mean that
-it’s not able to be changed: we mean something has ‘exterior mutability’. Consider,
-for example, [`Arc<T>`][arc]:
+it’s not able to be changed: we are referring to its ‘exterior mutability’ that
+in this case is immutable. Consider, for example, [`Arc<T>`][arc]:
 
 ```rust
 use std::sync::Arc;
index b2fddf336273fe0d1f3d663e7bd09886579e41fb..328db25b819d89d236af7e8c2d8ea82ce63a5e1b 100644 (file)
@@ -163,11 +163,51 @@ struct Point(i32, i32, i32);
 let black = Color(0, 0, 0);
 let origin = Point(0, 0, 0);
 ```
-Here, `black` and `origin` are not equal, even though they contain the same
-values.
 
-It is almost always better to use a `struct` than a tuple struct. We
-would write `Color` and `Point` like this instead:
+Here, `black` and `origin` are not the same type, even though they contain the
+same values.
+
+The members of a tuple struct may be accessed by dot notation or destructuring
+`let`, just like regular tuples:
+
+```rust
+# struct Color(i32, i32, i32);
+# struct Point(i32, i32, i32);
+# let black = Color(0, 0, 0);
+# let origin = Point(0, 0, 0);
+let black_r = black.0;
+let Point(_, origin_y, origin_z) = origin;
+```
+
+Patterns like `Point(_, origin_y, origin_z)` are also used in
+[match expressions][match].
+
+One case when a tuple struct is very useful is when it has only one element.
+We call this the ‘newtype’ pattern, because it allows you to create a new type
+that is distinct from its contained value and also expresses its own semantic
+meaning:
+
+```rust
+struct Inches(i32);
+
+let length = Inches(10);
+
+let Inches(integer_length) = length;
+println!("length is {} inches", integer_length);
+```
+
+As above, you can extract the inner integer type through a destructuring `let`.
+In this case, the `let Inches(integer_length)` assigns `10` to `integer_length`.
+We could have used dot notation to do the same thing:
+
+```rust
+# struct Inches(i32);
+# let length = Inches(10);
+let integer_length = length.0;
+```
+
+It's always possible to use a `struct` instead of a tuple struct, and can be
+clearer. We could write `Color` and `Point` like this instead:
 
 ```rust
 struct Color {
@@ -187,32 +227,19 @@ Good names are important, and while values in a tuple struct can be
 referenced with dot notation as well, a `struct` gives us actual names,
 rather than positions.
 
-There _is_ one case when a tuple struct is very useful, though, and that is when
-it has only one element. We call this the ‘newtype’ pattern, because
-it allows you to create a new type that is distinct from its contained value
-and also expresses its own semantic meaning:
-
-```rust
-struct Inches(i32);
-
-let length = Inches(10);
-
-let Inches(integer_length) = length;
-println!("length is {} inches", integer_length);
-```
-
-As you can see here, you can extract the inner integer type through a
-destructuring `let`, as with regular tuples. In this case, the
-`let Inches(integer_length)` assigns `10` to `integer_length`.
+[match]: match.html
 
 # Unit-like structs
 
 You can define a `struct` with no members at all:
 
 ```rust
-struct Electron;
+struct Electron {} // use empty braces...
+struct Proton;     // ...or just a semicolon
 
-let x = Electron;
+// whether you declared the struct with braces or not, do the same when creating one
+let x = Electron {};
+let y = Proton;
 ```
 
 Such a `struct` is called ‘unit-like’ because it resembles the empty
index 7954085472e503dc64eceb946a71b89a11e80ab6..86729147ed0652befde62ee0834f12ec2c095d1c 100644 (file)
@@ -431,7 +431,7 @@ one.
 
 Cargo will ignore files in subdirectories of the `tests/` directory.
 Therefore shared modules in integrations tests are possible.
-For example `tests/common/mod.rs` is not seperatly compiled by cargo but can 
+For example `tests/common/mod.rs` is not separately compiled by cargo but can
 be imported in every test with `mod common;`
 
 That's all there is to the `tests` directory. The `tests` module isn't needed
index 33f22e8579664815349f342aa78794639de159e1..554ab66bc563d85328bd8c8303bcf035ac8fb380 100755 (executable)
 
 import gdb
 import re
+import sys
 import debugger_pretty_printers_common as rustpp
 
+# We want a version of `range` which doesn't allocate an intermediate list,
+# specifically it should use a lazy iterator. In Python 2 this was `xrange`, but
+# if we're running with Python 3 then we need to use `range` instead.
+if sys.version_info.major >= 3:
+    xrange = range
+
 #===============================================================================
 # GDB Pretty Printing Module for Rust
 #===============================================================================
@@ -215,7 +222,7 @@ class RustSlicePrinter:
         assert data_ptr.type.get_dwarf_type_kind() == rustpp.DWARF_TYPE_CODE_PTR
         raw_ptr = data_ptr.get_wrapped_value()
 
-        for index in range(0, length):
+        for index in xrange(0, length):
             yield (str(index), (raw_ptr + index).dereference())
 
 
@@ -244,7 +251,7 @@ class RustStdVecPrinter:
     def children(self):
         (length, data_ptr, cap) = rustpp.extract_length_ptr_and_cap_from_std_vec(self.__val)
         gdb_ptr = data_ptr.get_wrapped_value()
-        for index in range(0, length):
+        for index in xrange(0, length):
             yield (str(index), (gdb_ptr + index).dereference())
 
 
index a873be455d5558c673c3f6cd4d0d50f89f611bbd..2beb652aa017a6d2c16e959afe05d592a203224d 100644 (file)
@@ -10,7 +10,8 @@
 
 #![allow(deprecated)]
 
-//! Thread-local reference-counted boxes (the `Rc<T>` type).
+//! Unsynchronized reference-counted boxes (the `Rc<T>` type) which are usable
+//! only within a single thread.
 //!
 //! The `Rc<T>` type provides shared ownership of an immutable value.
 //! Destruction is deterministic, and will occur as soon as the last owner is
index 3ebab266e2ffed5494cc069a90172e64833f9b83..dffe9dee022a68704fc565efa10467bc56c2725b 100644 (file)
@@ -1198,17 +1198,15 @@ impl<I: ExactSizeIterator> ExactSizeIterator for Peekable<I> {}
 impl<I: Iterator> Peekable<I> {
     /// Returns a reference to the next() value without advancing the iterator.
     ///
-    /// The `peek()` method will return the value that a call to [`next()`] would
-    /// return, but does not advance the iterator. Like [`next()`], if there is
-    /// a value, it's wrapped in a `Some(T)`, but if the iterator is over, it
-    /// will return `None`.
+    /// Like [`next()`], if there is a value, it is wrapped in a `Some(T)`.
+    /// But if the iteration is over, `None` is returned.
     ///
     /// [`next()`]: trait.Iterator.html#tymethod.next
     ///
-    /// Because `peek()` returns reference, and many iterators iterate over
-    /// references, this leads to a possibly confusing situation where the
+    /// Because `peek()` returns reference, and many iterators iterate over
+    /// references, there can be a possibly confusing situation where the
     /// return value is a double reference. You can see this effect in the
-    /// examples below, with `&&i32`.
+    /// examples below.
     ///
     /// # Examples
     ///
@@ -1225,13 +1223,13 @@ impl<I: Iterator> Peekable<I> {
     ///
     /// assert_eq!(iter.next(), Some(&2));
     ///
-    /// // we can peek() multiple times, the iterator won't advance
+    /// // The iterator does not advance even if we `peek` multiple times
     /// assert_eq!(iter.peek(), Some(&&3));
     /// assert_eq!(iter.peek(), Some(&&3));
     ///
     /// assert_eq!(iter.next(), Some(&3));
     ///
-    /// // after the iterator is finished, so is peek()
+    /// // After the iterator is finished, so is `peek()`
     /// assert_eq!(iter.peek(), None);
     /// assert_eq!(iter.next(), None);
     /// ```
@@ -1263,10 +1261,10 @@ pub fn peek(&mut self) -> Option<&I::Item> {
     ///
     /// let mut iter = xs.iter().peekable();
     ///
-    /// // there are still elements to iterate over
+    /// // There are still elements to iterate over
     /// assert_eq!(iter.is_empty(), false);
     ///
-    /// // let's consume the iterator
+    /// // Let's consume the iterator
     /// iter.next();
     /// iter.next();
     /// iter.next();
index 3549bd6a3bc68f4fcfa52cab27ed36a9817b5378..9b5c2128f1eaf9fd59460d6fc432794f0a05fc26 100644 (file)
@@ -371,13 +371,16 @@ pub trait Extend<A> {
 /// Basic usage:
 ///
 /// ```
-/// let numbers = vec![1, 2, 3];
+/// let numbers = vec![1, 2, 3, 4, 5, 6];
 ///
 /// let mut iter = numbers.iter();
 ///
 /// assert_eq!(Some(&1), iter.next());
-/// assert_eq!(Some(&3), iter.next_back());
-/// assert_eq!(Some(&2), iter.next_back());
+/// assert_eq!(Some(&6), iter.next_back());
+/// assert_eq!(Some(&5), iter.next_back());
+/// assert_eq!(Some(&2), iter.next());
+/// assert_eq!(Some(&3), iter.next());
+/// assert_eq!(Some(&4), iter.next());
 /// assert_eq!(None, iter.next());
 /// assert_eq!(None, iter.next_back());
 /// ```
@@ -395,13 +398,16 @@ pub trait DoubleEndedIterator: Iterator {
     /// Basic usage:
     ///
     /// ```
-    /// let numbers = vec![1, 2, 3];
+    /// let numbers = vec![1, 2, 3, 4, 5, 6];
     ///
     /// let mut iter = numbers.iter();
     ///
     /// assert_eq!(Some(&1), iter.next());
-    /// assert_eq!(Some(&3), iter.next_back());
-    /// assert_eq!(Some(&2), iter.next_back());
+    /// assert_eq!(Some(&6), iter.next_back());
+    /// assert_eq!(Some(&5), iter.next_back());
+    /// assert_eq!(Some(&2), iter.next());
+    /// assert_eq!(Some(&3), iter.next());
+    /// assert_eq!(Some(&4), iter.next());
     /// assert_eq!(None, iter.next());
     /// assert_eq!(None, iter.next_back());
     /// ```
index 79e1462eaa135eb58013a157fb584f6823b5485f..07b05f91f489f9ecfa3752b2175e5f0bc35a3251 100644 (file)
 use num::Float;
 use num::FpCategory as Fp;
 
+/// The radix or base of the internal representation of `f32`.
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const RADIX: u32 = 2;
 
+/// Number of significant digits in base 2.
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const MANTISSA_DIGITS: u32 = 24;
+/// Approximate number of significant digits in base 10.
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const DIGITS: u32 = 6;
 
+/// Difference between `1.0` and the next largest representable number.
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const EPSILON: f32 = 1.19209290e-07_f32;
 
-/// Smallest finite f32 value
+/// Smallest finite `f32` value.
 #[stable(feature = "rust1", since = "1.0.0")]
 pub const MIN: f32 = -3.40282347e+38_f32;
-/// Smallest positive, normalized f32 value
+/// Smallest positive normal `f32` value.
 #[stable(feature = "rust1", since = "1.0.0")]
 pub const MIN_POSITIVE: f32 = 1.17549435e-38_f32;
-/// Largest finite f32 value
+/// Largest finite `f32` value.
 #[stable(feature = "rust1", since = "1.0.0")]
 pub const MAX: f32 = 3.40282347e+38_f32;
 
+/// One greater than the minimum possible normal power of 2 exponent.
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const MIN_EXP: i32 = -125;
+/// Maximum possible power of 2 exponent.
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const MAX_EXP: i32 = 128;
 
+/// Minimum possible normal power of 10 exponent.
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const MIN_10_EXP: i32 = -37;
+/// Maximum possible power of 10 exponent.
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const MAX_10_EXP: i32 = 38;
 
+/// Not a Number (NaN).
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const NAN: f32 = 0.0_f32/0.0_f32;
+/// Infinity (∞).
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const INFINITY: f32 = 1.0_f32/0.0_f32;
+/// Negative infinity (-∞).
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const NEG_INFINITY: f32 = -1.0_f32/0.0_f32;
 
 /// Basic mathematical constants.
 pub mod consts {
     // FIXME: replace with mathematical constants from cmath.
 
-    /// Archimedes' constant
+    /// Archimedes' constant (π)
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const PI: f32 = 3.14159265358979323846264338327950288_f32;
 
-    /// pi/2.0
+    /// π/2
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const FRAC_PI_2: f32 = 1.57079632679489661923132169163975144_f32;
 
-    /// pi/3.0
+    /// π/3
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const FRAC_PI_3: f32 = 1.04719755119659774615421446109316763_f32;
 
-    /// pi/4.0
+    /// π/4
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const FRAC_PI_4: f32 = 0.785398163397448309615660845819875721_f32;
 
-    /// pi/6.0
+    /// π/6
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const FRAC_PI_6: f32 = 0.52359877559829887307710723054658381_f32;
 
-    /// pi/8.0
+    /// π/8
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const FRAC_PI_8: f32 = 0.39269908169872415480783042290993786_f32;
 
-    /// 1.0/pi
+    /// 1
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const FRAC_1_PI: f32 = 0.318309886183790671537767526745028724_f32;
 
-    /// 2.0/pi
+    /// 2
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const FRAC_2_PI: f32 = 0.636619772367581343075535053490057448_f32;
 
-    /// 2.0/sqrt(pi)
+    /// 2/sqrt(π)
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const FRAC_2_SQRT_PI: f32 = 1.12837916709551257389615890312154517_f32;
 
-    /// sqrt(2.0)
+    /// sqrt(2)
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const SQRT_2: f32 = 1.41421356237309504880168872420969808_f32;
 
-    /// 1.0/sqrt(2.0)
+    /// 1/sqrt(2)
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const FRAC_1_SQRT_2: f32 = 0.707106781186547524400844362104849039_f32;
 
-    /// Euler's number
+    /// Euler's number (e)
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const E: f32 = 2.71828182845904523536028747135266250_f32;
 
-    /// log2(e)
+    /// log<sub>2</sub>(e)
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const LOG2_E: f32 = 1.44269504088896340735992468100189214_f32;
 
-    /// log10(e)
+    /// log<sub>10</sub>(e)
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const LOG10_E: f32 = 0.434294481903251827651128918916605082_f32;
 
-    /// ln(2.0)
+    /// ln(2)
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const LN_2: f32 = 0.693147180559945309417232121458176568_f32;
 
-    /// ln(10.0)
+    /// ln(10)
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const LN_10: f32 = 2.30258509299404568401799145468436421_f32;
 }
index 35557f61c45420b5ff291aa369876a63e94be7aa..82a09e599e027a49065a342fcaac64fd31da2a79 100644 (file)
 use num::FpCategory as Fp;
 use num::Float;
 
+/// The radix or base of the internal representation of `f64`.
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const RADIX: u32 = 2;
 
+/// Number of significant digits in base 2.
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const MANTISSA_DIGITS: u32 = 53;
+/// Approximate number of significant digits in base 10.
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const DIGITS: u32 = 15;
 
+/// Difference between `1.0` and the next largest representable number.
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const EPSILON: f64 = 2.2204460492503131e-16_f64;
 
-/// Smallest finite f64 value
+/// Smallest finite `f64` value.
 #[stable(feature = "rust1", since = "1.0.0")]
 pub const MIN: f64 = -1.7976931348623157e+308_f64;
-/// Smallest positive, normalized f64 value
+/// Smallest positive normal `f64` value.
 #[stable(feature = "rust1", since = "1.0.0")]
 pub const MIN_POSITIVE: f64 = 2.2250738585072014e-308_f64;
-/// Largest finite f64 value
+/// Largest finite `f64` value.
 #[stable(feature = "rust1", since = "1.0.0")]
 pub const MAX: f64 = 1.7976931348623157e+308_f64;
 
+/// One greater than the minimum possible normal power of 2 exponent.
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const MIN_EXP: i32 = -1021;
+/// Maximum possible power of 2 exponent.
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const MAX_EXP: i32 = 1024;
 
+/// Minimum possible normal power of 10 exponent.
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const MIN_10_EXP: i32 = -307;
+/// Maximum possible power of 10 exponent.
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const MAX_10_EXP: i32 = 308;
 
+/// Not a Number (NaN).
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const NAN: f64 = 0.0_f64/0.0_f64;
+/// Infinity (∞).
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const INFINITY: f64 = 1.0_f64/0.0_f64;
+/// Negative infinity (-∞).
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const NEG_INFINITY: f64 = -1.0_f64/0.0_f64;
 
 /// Basic mathematical constants.
 pub mod consts {
     // FIXME: replace with mathematical constants from cmath.
 
-    /// Archimedes' constant
+    /// Archimedes' constant (π)
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const PI: f64 = 3.14159265358979323846264338327950288_f64;
 
-    /// pi/2.0
+    /// π/2
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const FRAC_PI_2: f64 = 1.57079632679489661923132169163975144_f64;
 
-    /// pi/3.0
+    /// π/3
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const FRAC_PI_3: f64 = 1.04719755119659774615421446109316763_f64;
 
-    /// pi/4.0
+    /// π/4
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const FRAC_PI_4: f64 = 0.785398163397448309615660845819875721_f64;
 
-    /// pi/6.0
+    /// π/6
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const FRAC_PI_6: f64 = 0.52359877559829887307710723054658381_f64;
 
-    /// pi/8.0
+    /// π/8
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const FRAC_PI_8: f64 = 0.39269908169872415480783042290993786_f64;
 
-    /// 1.0/pi
+    /// 1
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const FRAC_1_PI: f64 = 0.318309886183790671537767526745028724_f64;
 
-    /// 2.0/pi
+    /// 2
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const FRAC_2_PI: f64 = 0.636619772367581343075535053490057448_f64;
 
-    /// 2.0/sqrt(pi)
+    /// 2/sqrt(π)
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const FRAC_2_SQRT_PI: f64 = 1.12837916709551257389615890312154517_f64;
 
-    /// sqrt(2.0)
+    /// sqrt(2)
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const SQRT_2: f64 = 1.41421356237309504880168872420969808_f64;
 
-    /// 1.0/sqrt(2.0)
+    /// 1/sqrt(2)
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const FRAC_1_SQRT_2: f64 = 0.707106781186547524400844362104849039_f64;
 
-    /// Euler's number
+    /// Euler's number (e)
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const E: f64 = 2.71828182845904523536028747135266250_f64;
 
-    /// log2(e)
+    /// log<sub>2</sub>(e)
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const LOG2_E: f64 = 1.44269504088896340735992468100189214_f64;
 
-    /// log10(e)
+    /// log<sub>10</sub>(e)
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const LOG10_E: f64 = 0.434294481903251827651128918916605082_f64;
 
-    /// ln(2.0)
+    /// ln(2)
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const LN_2: f64 = 0.693147180559945309417232121458176568_f64;
 
-    /// ln(10.0)
+    /// ln(10)
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const LN_10: f64 = 2.30258509299404568401799145468436421_f64;
 }
index bd6cfc427affd04a936a2a115440c5989eb7606f..e74c30d5e5af8db76279a044e1051269f2354bbf 100644 (file)
 
 macro_rules! int_module { ($T:ident, $bits:expr) => (
 
+/// The smallest value that can be represented by this integer type.
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const MIN: $T = $T::min_value();
+/// The largest value that can be represented by this integer type.
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const MAX: $T = $T::max_value();
 
 ) }
index 0d79398a8f1d51bd26e975a0a45fe16fe5b58aab..b41ef7984bbab5c3131ba0bd9a84a20a307cdfb1 100644 (file)
@@ -11,7 +11,6 @@
 //! Numeric traits and functions for the built-in numeric types.
 
 #![stable(feature = "rust1", since = "1.0.0")]
-#![allow(missing_docs)]
 
 use char::CharExt;
 use cmp::PartialOrd;
index 2ab2f9548ef1bfd5ec67ccfb79b1a43efa333329..cc9256ab6bf4ee34fd5e2126eb8f9e66e77461b5 100644 (file)
 
 macro_rules! uint_module { ($T:ident, $bits:expr) => (
 
+/// The smallest value that can be represented by this integer type.
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const MIN: $T = $T::min_value();
+/// The largest value that can be represented by this integer type.
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const MAX: $T = $T::max_value();
 
 ) }
index a139dd152f006f5bcc51d26c0d26a1cb33ea7a21..e1e681b7aff3541e3a8fe2c2589d85aca9128ada 100644 (file)
@@ -836,7 +836,7 @@ pub enum Expr_ {
     ExprVec(HirVec<P<Expr>>),
     /// A function call
     ///
-    /// The first field resolves to the function itself,
+    /// The first field resolves to the function itself (usually an `ExprPath`),
     /// and the second field is the list of arguments
     ExprCall(P<Expr>, HirVec<P<Expr>>),
     /// A method call (`x.foo::<Bar, Baz>(a, b, c, d)`)
@@ -845,9 +845,9 @@ pub enum Expr_ {
     /// The vector of `Ty`s are the ascripted type parameters for the method
     /// (within the angle brackets).
     ///
-    /// The first element of the vector of `Expr`s is the expression that evaluates
-    /// to the object on which the method is being called on (the receiver),
-    /// and the remaining elements are the rest of the arguments.
+    /// The first element of the vector of `Expr`s is the expression that
+    /// evaluates to the object on which the method is being called on (the
+    /// receiver), and the remaining elements are the rest of the arguments.
     ///
     /// Thus, `x.foo::<Bar, Baz>(a, b, c, d)` is represented as
     /// `ExprMethodCall(foo, [Bar, Baz], [x, a, b, c, d])`.
@@ -919,13 +919,13 @@ pub enum Expr_ {
     /// Inline assembly (from `asm!`), with its outputs and inputs.
     ExprInlineAsm(InlineAsm, Vec<P<Expr>>, Vec<P<Expr>>),
 
-    /// A struct literal expression.
+    /// A struct or struct-like variant literal expression.
     ///
     /// For example, `Foo {x: 1, y: 2}`, or
     /// `Foo {x: 1, .. base}`, where `base` is the `Option<Expr>`.
     ExprStruct(Path, HirVec<Field>, Option<P<Expr>>),
 
-    /// A vector literal constructed from one repeated element.
+    /// An array literal constructed from one repeated element.
     ///
     /// For example, `[1; 5]`. The first expression is the element
     /// to be repeated; the second is the number of times to repeat it.
@@ -950,14 +950,21 @@ pub struct QSelf {
     pub position: usize,
 }
 
+/// Hints at the original code for a `match _ { .. }`
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
 pub enum MatchSource {
+    /// A `match _ { .. }`
     Normal,
+    /// An `if let _ = _ { .. }` (optionally with `else { .. }`)
     IfLetDesugar {
         contains_else_clause: bool,
     },
+    /// A `while let _ = _ { .. }` (which was desugared to a
+    /// `loop { match _ { .. } }`)
     WhileLetDesugar,
+    /// A desugared `for _ in _ { .. }` loop
     ForLoopDesugar,
+    /// A desugared `?` operator
     TryDesugar,
 }
 
@@ -975,8 +982,7 @@ pub struct MutTy {
     pub mutbl: Mutability,
 }
 
-/// Represents a method's signature in a trait declaration,
-/// or in an implementation.
+/// Represents a method's signature in a trait declaration or implementation.
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
 pub struct MethodSig {
     pub unsafety: Unsafety,
@@ -999,13 +1005,20 @@ pub struct TraitItem {
     pub span: Span,
 }
 
+/// Represents a trait method or associated constant or type
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
 pub enum TraitItem_ {
+    /// An associated constant with an optional value (otherwise `impl`s
+    /// must contain a value)
     ConstTraitItem(P<Ty>, Option<P<Expr>>),
+    /// A method with an optional body
     MethodTraitItem(MethodSig, Option<P<Block>>),
+    /// An associated type with (possibly empty) bounds and optional concrete
+    /// type
     TypeTraitItem(TyParamBounds, Option<P<Ty>>),
 }
 
+/// Represents anything within an `impl` block
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
 pub struct ImplItem {
     pub id: NodeId,
@@ -1017,10 +1030,15 @@ pub struct ImplItem {
     pub span: Span,
 }
 
+/// Represents different contents within `impl`s
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
 pub enum ImplItemKind {
+    /// An associated constant of the given type, set to the constant result
+    /// of the expression
     Const(P<Ty>, P<Expr>),
+    /// A method implementation with the given signature and body
     Method(MethodSig, P<Block>),
+    /// An associated type
     Type(P<Ty>),
 }
 
index 66b0d663424aa829a95c673de86b76e27b63ae8e..a7fb039c295f8bdbda2e9cb3356e3ab1a8232a08 100644 (file)
@@ -980,7 +980,7 @@ pub struct Resolver<'a> {
     //
     // There will be an anonymous module created around `g` with the ID of the
     // entry block for `f`.
-    module_map: NodeMap<Module<'a>>,
+    pub module_map: NodeMap<Module<'a>>,
 
     // Whether or not to print error messages. Can be set to true
     // when getting additional info for error message suggestions,
@@ -2674,6 +2674,34 @@ fn with_no_errors<T, F>(&mut self, f: F) -> T
         rs
     }
 
+    // Calls `f` with a `Resolver` whose current lexical scope is `module`'s lexical scope,
+    // i.e. the module's items and the prelude (unless the module is `#[no_implicit_prelude]`).
+    // FIXME #34673: This needs testing.
+    pub fn with_module_lexical_scope<T, F>(&mut self, module: Module<'a>, f: F) -> T
+        where F: FnOnce(&mut Resolver<'a>) -> T,
+    {
+        self.with_empty_ribs(|this| {
+            this.value_ribs.push(Rib::new(ModuleRibKind(module)));
+            this.type_ribs.push(Rib::new(ModuleRibKind(module)));
+            f(this)
+        })
+    }
+
+    fn with_empty_ribs<T, F>(&mut self, f: F) -> T
+        where F: FnOnce(&mut Resolver<'a>) -> T,
+    {
+        use ::std::mem::replace;
+        let value_ribs = replace(&mut self.value_ribs, Vec::new());
+        let type_ribs = replace(&mut self.type_ribs, Vec::new());
+        let label_ribs = replace(&mut self.label_ribs, Vec::new());
+
+        let result = f(self);
+        self.value_ribs = value_ribs;
+        self.type_ribs = type_ribs;
+        self.label_ribs = label_ribs;
+        result
+    }
+
     fn find_fallback_in_self_type(&mut self, name: Name) -> FallbackSuggestion {
         fn extract_node_id(t: &Ty) -> Option<NodeId> {
             match t.node {
@@ -2880,8 +2908,7 @@ fn resolve_expr(&mut self, expr: &Expr, parent: Option<&Expr>) {
                                 if !msg.is_empty() {
                                     msg = format!(". Did you mean {}?", msg);
                                 } else {
-                                    // we check if this a module and if so, we display a help
-                                    // message
+                                    // we display a help message if this is a module
                                     let name_path = path.segments.iter()
                                                         .map(|seg| seg.identifier.name)
                                                         .collect::<Vec<_>>();
index c1960eeee46b8fb7eccede5f0fda8f5f911e96b4..4ffb5477305493e1e366a73dd125a6ffde0b85eb 100644 (file)
@@ -29,6 +29,7 @@
 
 use rustc::hir::def::Def;
 use rustc::hir::def_id::DefId;
+use rustc::hir::map::Node;
 use rustc::session::Session;
 use rustc::ty::{self, TyCtxt, ImplOrTraitItem, ImplOrTraitItemContainer};
 
@@ -1299,7 +1300,14 @@ fn visit_expr(&mut self, ex: &ast::Expr) {
             ast::ExprKind::TupField(ref sub_ex, idx) => {
                 self.visit_expr(&sub_ex);
 
-                let hir_node = self.save_ctxt.tcx.map.expect_expr(sub_ex.id);
+                let hir_node = match self.save_ctxt.tcx.map.find(sub_ex.id) {
+                    Some(Node::NodeExpr(expr)) => expr,
+                    _ => {
+                        debug!("Missing or weird node for sub-expression {} in {:?}",
+                               sub_ex.id, ex);
+                        return;
+                    }
+                };
                 let ty = &self.tcx.expr_ty_adjusted(&hir_node).sty;
                 match *ty {
                     ty::TyStruct(def, _) => {
index 3ef6e29a6f83894da32612e3ba11b5fa4fcd780a..10af326be26a3091af33c7709bae7bd11d9c0f6f 100644 (file)
@@ -1495,20 +1495,27 @@ fn borrow(&mut self, _: ast::NodeId, _: Span, _: mc::cmt, _: ty::Region,
     fn decl_without_init(&mut self, _: ast::NodeId, _: Span) {}
 
     fn mutate(&mut self, _: ast::NodeId, _: Span, cmt: mc::cmt, _: euv::MutateMode) {
+        let cmt_id = |cmt: &mc::cmt| match cmt.cat {
+            Categorization::Upvar(mc::Upvar { id: ty::UpvarId { var_id: vid, ..}, ..}) |
+            Categorization::Local(vid) => Some(vid),
+            Categorization::Interior(ref base_cmt, mc::InteriorField(_)) => Some(base_cmt.id),
+            _ => None
+        };
         match cmt.cat {
             Categorization::Upvar(mc::Upvar { id: ty::UpvarId { var_id: vid, .. }, .. }) |
             Categorization::Local(vid) => self.reassigned |= self.node == vid,
-            Categorization::Interior(ref base_cmt, mc::InteriorField(field)) => {
-                match base_cmt.cat {
-                    Categorization::Upvar(mc::Upvar { id: ty::UpvarId { var_id: vid, .. }, .. }) |
-                    Categorization::Local(vid) => {
-                        self.reassigned |= self.node == vid &&
-                            (self.field.is_none() || Some(field) == self.field)
-                    },
-                    _ => {}
+            ref cat => {
+                let mut cat = cat;
+                while let &Categorization::Interior(ref base_cmt, mc::InteriorField(field)) = cat {
+                    if let Some(vid) = cmt_id(base_cmt) {
+                        if self.node == vid && (self.field.is_none() || self.field == Some(field)) {
+                            self.reassigned = true;
+                            return;
+                        }
+                    }
+                    cat = &base_cmt.cat;
                 }
-            },
-            _ => {}
+            }
         }
     }
 }
index 7a572fdadc3d795bdac78a20a3bb3ea80449ed96..590220f0c8b6478c172254f830e950fcf7804f31 100644 (file)
@@ -75,7 +75,6 @@
 use declare;
 use expr;
 use glue;
-use inline;
 use machine;
 use machine::{llalign_of_min, llsize_of, llsize_of_real};
 use meth;
@@ -1407,19 +1406,17 @@ impl<'blk, 'tcx> FunctionContext<'blk, 'tcx> {
     pub fn new(ccx: &'blk CrateContext<'blk, 'tcx>,
                llfndecl: ValueRef,
                fn_ty: FnType,
-               definition: Option<(Instance<'tcx>, &ty::FnSig<'tcx>, Abi)>,
+               definition: Option<(Instance<'tcx>, &ty::FnSig<'tcx>, Abi, ast::NodeId)>,
                block_arena: &'blk TypedArena<common::BlockS<'blk, 'tcx>>)
                -> FunctionContext<'blk, 'tcx> {
-        let (param_substs, def_id) = match definition {
-            Some((instance, _, _)) => {
+        let (param_substs, def_id, inlined_id) = match definition {
+            Some((instance, _, _, inlined_id)) => {
                 common::validate_substs(instance.substs);
-                (instance.substs, Some(instance.def))
+                (instance.substs, Some(instance.def), Some(inlined_id))
             }
-            None => (ccx.tcx().mk_substs(Substs::empty()), None)
+            None => (ccx.tcx().mk_substs(Substs::empty()), None, None)
         };
 
-        let inlined_did = def_id.and_then(|def_id| inline::get_local_instance(ccx, def_id));
-        let inlined_id = inlined_did.and_then(|id| ccx.tcx().map.as_local_node_id(id));
         let local_id = def_id.and_then(|id| ccx.tcx().map.as_local_node_id(id));
 
         debug!("FunctionContext::new({})",
@@ -1454,7 +1451,7 @@ pub fn new(ccx: &'blk CrateContext<'blk, 'tcx>,
         };
 
         let debug_context = if let (false, Some(definition)) = (no_debug, definition) {
-            let (instance, sig, abi) = definition;
+            let (instance, sig, abi, _) = definition;
             debuginfo::create_function_debug_context(ccx, instance, sig, abi, llfndecl)
         } else {
             debuginfo::empty_function_debug_context(ccx)
@@ -1850,7 +1847,11 @@ pub fn trans_closure<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
 
     let (arena, fcx): (TypedArena<_>, FunctionContext);
     arena = TypedArena::new();
-    fcx = FunctionContext::new(ccx, llfndecl, fn_ty, Some((instance, sig, abi)), &arena);
+    fcx = FunctionContext::new(ccx,
+                               llfndecl,
+                               fn_ty,
+                               Some((instance, sig, abi, inlined_id)),
+                               &arena);
 
     if fcx.mir.is_some() {
         return mir::trans_mir(&fcx);
index 84e98a6739193e9b917135c3b218e00cb15bd08f..096e1ecc9ffb6a33a67b1dceb84a2b8837dd1d26 100644 (file)
@@ -107,7 +107,7 @@ pub enum Class {
 ///
 /// The classifier will call into the `Writer` implementation as it finds spans
 /// of text to highlight. Exactly how that text should be highlighted is up to
-/// the implemention.
+/// the implementation.
 pub trait Writer {
     /// Called when we start processing a span of text that should be highlighted.
     /// The `Class` argument specifies how it should be highlighted.
index 6ab2bcc768590f16a5285188725d06f0c147039d..c263bcb04e9b6fc62b199100b5a65c6a47211598 100644 (file)
@@ -2716,7 +2716,7 @@ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
         let parentlen = cx.current.len() - if it.is_mod() {1} else {0};
 
         // the sidebar is designed to display sibling functions, modules and
-        // other miscellaneous informations. since there are lots of sibling
+        // other miscellaneous information. since there are lots of sibling
         // items (and that causes quadratic growth in large modules),
         // we refactor common parts into a shared JavaScript file per module.
         // still, we don't move everything into JS because we want to preserve
index b45e059e6d5e9d87c671223d20dfa3da090e6327..303cc671f4a230c79181cd8ad91ad8b66992b237 100644 (file)
@@ -572,14 +572,6 @@ a.test-arrow {
     right: 5px;
 }
 
-.methods .section-header {
-    /* Override parent class attributes. */
-    border-bottom: none !important;
-    font-size: 1.1em !important;
-    margin: 0 0 -5px;
-    padding: 0;
-}
-
 .section-header:hover a:after {
     content: '\2002\00a7\2002';
 }
index e142c78569bd7ad597e3b71bfcb6db88df487023..05ae8ed5b0b66be71b491821fa99be19f2534f74 100644 (file)
@@ -214,6 +214,30 @@ pub fn last_os_error() -> Error {
     }
 
     /// Creates a new instance of an `Error` from a particular OS error code.
+    ///
+    /// # Examples
+    ///
+    /// On Linux:
+    ///
+    /// ```
+    /// # if cfg!(target_os = "linux") {
+    /// use std::io;
+    ///
+    /// let error = io::Error::from_raw_os_error(98);
+    /// assert_eq!(error.kind(), io::ErrorKind::AddrInUse);
+    /// # }
+    /// ```
+    ///
+    /// On Windows:
+    ///
+    /// ```
+    /// # if cfg!(windows) {
+    /// use std::io;
+    ///
+    /// let error = io::Error::from_raw_os_error(10048);
+    /// assert_eq!(error.kind(), io::ErrorKind::AddrInUse);
+    /// # }
+    /// ```
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn from_raw_os_error(code: i32) -> Error {
         Error { repr: Repr::Os(code) }
index 1d97611eabb2671261826c8abfa2a090dcc2f886..a408b4378e19e6b14cdb0f68478adf628f527dc0 100644 (file)
@@ -239,7 +239,7 @@ pub fn memrchr(x: u8, text: &[u8]) -> Option<usize> {
         text[..offset].iter().rposition(|elt| *elt == x)
     }
 
-    // test fallback implementations on all plattforms
+    // test fallback implementations on all platforms
     #[test]
     fn matches_one() {
         assert_eq!(Some(0), memchr(b'a', b"a"));
index ad4cdef615847719d2f63ff078b8f36346a302e9..2d19561139b58144d12df7dc4c37f838199a1505 100644 (file)
@@ -1529,8 +1529,7 @@ pub fn parent(&self) -> Option<&Path> {
 
     /// The final component of the path, if it is a normal file.
     ///
-    /// If the path terminates in `.`, `..`, or consists solely of a root of
-    /// prefix, `file_name` will return `None`.
+    /// If the path terminates in `..`, `file_name` will return `None`.
     ///
     /// # Examples
     ///
@@ -1543,6 +1542,17 @@ pub fn parent(&self) -> Option<&Path> {
     ///
     /// assert_eq!(Some(os_str), path.file_name());
     /// ```
+    ///
+    /// # Other examples
+    ///
+    /// ```
+    /// use std::path::Path;
+    /// use std::ffi::OsStr;
+    ///
+    /// assert_eq!(Some(OsStr::new("foo.txt")), Path::new("foo.txt/.").file_name());
+    /// assert_eq!(Some(OsStr::new("foo.txt")), Path::new("foo.txt/.//").file_name());
+    /// assert_eq!(None, Path::new("foo.txt/..").file_name());
+    /// ```
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn file_name(&self) -> Option<&OsStr> {
         self.components().next_back().and_then(|p| {
index c670283e559d9b8c60451dc964c617302ccada20..b2b63d0dbb4bd76b4b8fe8ac3121adf460438d67 100644 (file)
@@ -237,7 +237,7 @@ fn mac_result<'a>(path: &ast::Path, ident: Option<Ident>, tts: Vec<TokenTree>, m
                     },
                 });
 
-                let marked_tts = mark_tts(tts, mark);
+                let marked_tts = mark_tts(&tts, mark);
                 Some(expandfun.expand(fld.cx, call_site, &marked_tts))
             }
 
@@ -257,7 +257,7 @@ fn mac_result<'a>(path: &ast::Path, ident: Option<Ident>, tts: Vec<TokenTree>, m
                     }
                 });
 
-                let marked_tts = mark_tts(tts, mark);
+                let marked_tts = mark_tts(&tts, mark);
                 Some(expander.expand(fld.cx, call_site, ident, marked_tts))
             }
 
@@ -769,7 +769,11 @@ fn expand_annotatable(mut item: Annotatable, fld: &mut MacroExpander) -> SmallVe
             };
 
             fld.cx.bt_pop();
-            modified.into_iter().flat_map(|it| expand_annotatable(it, fld)).collect()
+            let configured = modified.into_iter().flat_map(|it| {
+                it.fold_with(&mut fld.strip_unconfigured())
+            }).collect::<SmallVector<_>>();
+
+            configured.into_iter().flat_map(|it| expand_annotatable(it, fld)).collect()
         }
     }
 }
@@ -1126,7 +1130,7 @@ fn fold_mac(&mut self, Spanned {node, span}: ast::Mac) -> ast::Mac {
         Spanned {
             node: Mac_ {
                 path: self.fold_path(node.path),
-                tts: self.fold_tts(node.tts),
+                tts: self.fold_tts(&node.tts),
             },
             span: self.new_span(span),
         }
@@ -1141,7 +1145,7 @@ fn new_span(&mut self, mut span: Span) -> Span {
 }
 
 // apply a given mark to the given token trees. Used prior to expansion of a macro.
-fn mark_tts(tts: Vec<TokenTree>, m: Mrk) -> Vec<TokenTree> {
+fn mark_tts(tts: &[TokenTree], m: Mrk) -> Vec<TokenTree> {
     noop_fold_tts(tts, &mut Marker{mark:m, expn_id: None})
 }
 
index 68527b0797d5b6031da5ca6dcc4a2d9eb2c24341..ffc950d76dd27f43047bb06e54def08e5f265266 100644 (file)
@@ -32,6 +32,7 @@ pub mod rt {
     use ext::base::ExtCtxt;
     use parse::{self, token, classify};
     use ptr::P;
+    use std::rc::Rc;
 
     use tokenstream::{self, TokenTree};
 
@@ -215,12 +216,12 @@ fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> {
             if self.node.style == ast::AttrStyle::Inner {
                 r.push(TokenTree::Token(self.span, token::Not));
             }
-            r.push(TokenTree::Delimited(self.span, tokenstream::Delimited {
+            r.push(TokenTree::Delimited(self.span, Rc::new(tokenstream::Delimited {
                 delim: token::Bracket,
                 open_span: self.span,
                 tts: self.node.value.to_tokens(cx),
                 close_span: self.span,
-            }));
+            })));
             r
         }
     }
@@ -235,12 +236,12 @@ fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> {
 
     impl ToTokens for () {
         fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
-            vec![TokenTree::Delimited(DUMMY_SP, tokenstream::Delimited {
+            vec![TokenTree::Delimited(DUMMY_SP, Rc::new(tokenstream::Delimited {
                 delim: token::Paren,
                 open_span: DUMMY_SP,
                 tts: vec![],
                 close_span: DUMMY_SP,
-            })]
+            }))]
         }
     }
 
@@ -791,9 +792,14 @@ fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, matcher: bool) -> Vec<ast::Stm
                                 id_ext("tokenstream"),
                                 id_ext("SequenceRepetition")];
             let e_seq_struct = cx.expr_struct(sp, cx.path_global(sp, seq_path), fields);
+            let e_rc_new = cx.expr_call_global(sp, vec![id_ext("std"),
+                                                        id_ext("rc"),
+                                                        id_ext("Rc"),
+                                                        id_ext("new")],
+                                                   vec![e_seq_struct]);
             let e_tok = cx.expr_call(sp,
                                      mk_tt_path(cx, sp, "Sequence"),
-                                     vec!(e_sp, e_seq_struct));
+                                     vec!(e_sp, e_rc_new));
             let e_push =
                 cx.expr_method_call(sp,
                                     cx.expr_ident(sp, id_ext("tt")),
index 23f0b1fff0ae72b79c28763d4bfab6befbfc8706..84572b84963f3812511a2efdb4c8622d875a4297 100644 (file)
@@ -28,6 +28,7 @@
 use std::cell::RefCell;
 use std::collections::{HashMap};
 use std::collections::hash_map::{Entry};
+use std::rc::Rc;
 
 struct ParserAnyMacro<'a> {
     parser: RefCell<Parser<'a>>,
@@ -262,7 +263,7 @@ pub fn compile<'cx>(cx: &'cx mut ExtCtxt,
     let match_lhs_tok = MatchNt(lhs_nm, token::str_to_ident("tt"));
     let match_rhs_tok = MatchNt(rhs_nm, token::str_to_ident("tt"));
     let argument_gram = vec![
-        TokenTree::Sequence(DUMMY_SP, tokenstream::SequenceRepetition {
+        TokenTree::Sequence(DUMMY_SP, Rc::new(tokenstream::SequenceRepetition {
             tts: vec![
                 TokenTree::Token(DUMMY_SP, match_lhs_tok),
                 TokenTree::Token(DUMMY_SP, token::FatArrow),
@@ -271,14 +272,14 @@ pub fn compile<'cx>(cx: &'cx mut ExtCtxt,
             separator: Some(token::Semi),
             op: tokenstream::KleeneOp::OneOrMore,
             num_captures: 2,
-        }),
+        })),
         // to phase into semicolon-termination instead of semicolon-separation
-        TokenTree::Sequence(DUMMY_SP, tokenstream::SequenceRepetition {
+        TokenTree::Sequence(DUMMY_SP, Rc::new(tokenstream::SequenceRepetition {
             tts: vec![TokenTree::Token(DUMMY_SP, token::Semi)],
             separator: None,
             op: tokenstream::KleeneOp::ZeroOrMore,
             num_captures: 0
-        }),
+        })),
     ];
 
     // Parse the macro_rules! invocation (`none` is for no interpolations):
index 40944a9a1c2d360bf13c78a9b1a45d742f2fb998..7c0d10669f30e108e6e173510ebbd2a9e8fc601c 100644 (file)
@@ -79,11 +79,11 @@ pub fn new_tt_reader_with_doc_flag(sp_diag: &Handler,
     let mut r = TtReader {
         sp_diag: sp_diag,
         stack: vec!(TtFrame {
-            forest: TokenTree::Sequence(DUMMY_SP, tokenstream::SequenceRepetition {
+            forest: TokenTree::Sequence(DUMMY_SP, Rc::new(tokenstream::SequenceRepetition {
                 tts: src,
                 // doesn't matter. This merely holds the root unzipping.
                 separator: None, op: tokenstream::KleeneOp::ZeroOrMore, num_captures: 0
-            }),
+            })),
             idx: 0,
             dotdotdoted: false,
             sep: None,
index ed6f09eed645f7741951542aaeea87665667266c..ac3d643b185cac725fd7139d14ae4541970a3be6 100644 (file)
@@ -28,6 +28,8 @@
 use util::small_vector::SmallVector;
 use util::move_map::MoveMap;
 
+use std::rc::Rc;
+
 pub trait Folder : Sized {
     // Any additions to this trait should happen in form
     // of a call to a public `noop_*` function that only calls
@@ -222,11 +224,11 @@ fn fold_ty_params(&mut self, tps: P<[TyParam]>) -> P<[TyParam]> {
         noop_fold_ty_params(tps, self)
     }
 
-    fn fold_tt(&mut self, tt: TokenTree) -> TokenTree {
+    fn fold_tt(&mut self, tt: &TokenTree) -> TokenTree {
         noop_fold_tt(tt, self)
     }
 
-    fn fold_tts(&mut self, tts: Vec<TokenTree>) -> Vec<TokenTree> {
+    fn fold_tts(&mut self, tts: &[TokenTree]) -> Vec<TokenTree> {
         noop_fold_tts(tts, self)
     }
 
@@ -501,7 +503,7 @@ pub fn noop_fold_mac<T: Folder>(Spanned {node, span}: Mac, fld: &mut T) -> Mac {
     Spanned {
         node: Mac_ {
             path: fld.fold_path(node.path),
-            tts: fld.fold_tts(node.tts),
+            tts: fld.fold_tts(&node.tts),
         },
         span: fld.new_span(span)
     }
@@ -528,26 +530,32 @@ pub fn noop_fold_arg<T: Folder>(Arg {id, pat, ty}: Arg, fld: &mut T) -> Arg {
     }
 }
 
-pub fn noop_fold_tt<T: Folder>(tt: TokenTree, fld: &mut T) -> TokenTree {
-    match tt {
+pub fn noop_fold_tt<T: Folder>(tt: &TokenTree, fld: &mut T) -> TokenTree {
+    match *tt {
         TokenTree::Token(span, ref tok) =>
             TokenTree::Token(span, fld.fold_token(tok.clone())),
-        TokenTree::Delimited(span, delimed) => TokenTree::Delimited(span, Delimited {
-            delim: delimed.delim,
-            open_span: delimed.open_span,
-            tts: fld.fold_tts(delimed.tts),
-            close_span: delimed.close_span,
-        }),
-        TokenTree::Sequence(span, seq) => TokenTree::Sequence(span, SequenceRepetition {
-            tts: fld.fold_tts(seq.tts),
-            separator: seq.separator.clone().map(|tok| fld.fold_token(tok)),
-            ..seq
-        }),
+        TokenTree::Delimited(span, ref delimed) => {
+            TokenTree::Delimited(span, Rc::new(
+                            Delimited {
+                                delim: delimed.delim,
+                                open_span: delimed.open_span,
+                                tts: fld.fold_tts(&delimed.tts),
+                                close_span: delimed.close_span,
+                            }
+                        ))
+        },
+        TokenTree::Sequence(span, ref seq) =>
+            TokenTree::Sequence(span,
+                       Rc::new(SequenceRepetition {
+                           tts: fld.fold_tts(&seq.tts),
+                           separator: seq.separator.clone().map(|tok| fld.fold_token(tok)),
+                           ..**seq
+                       })),
     }
 }
 
-pub fn noop_fold_tts<T: Folder>(tts: Vec<TokenTree>, fld: &mut T) -> Vec<TokenTree> {
-    tts.move_map(|tt| fld.fold_tt(tt))
+pub fn noop_fold_tts<T: Folder>(tts: &[TokenTree], fld: &mut T) -> Vec<TokenTree> {
+    tts.iter().map(|tt| fld.fold_tt(tt)).collect()
 }
 
 // apply ident folder if it's an ident, apply other folds to interpolated nodes
@@ -605,7 +613,7 @@ pub fn noop_fold_interpolated<T: Folder>(nt: token::Nonterminal, fld: &mut T)
             token::NtIdent(Box::new(Spanned::<Ident>{node: fld.fold_ident(id.node), ..*id})),
         token::NtMeta(meta_item) => token::NtMeta(fld.fold_meta_item(meta_item)),
         token::NtPath(path) => token::NtPath(Box::new(fld.fold_path(*path))),
-        token::NtTT(tt) => token::NtTT(tt.map(|tt| fld.fold_tt(tt))),
+        token::NtTT(tt) => token::NtTT(P(fld.fold_tt(&tt))),
         token::NtArm(arm) => token::NtArm(fld.fold_arm(arm)),
         token::NtImplItem(arm) =>
             token::NtImplItem(arm.map(|arm| fld.fold_impl_item(arm)
index bbcc044d43c6b74b8701924847df75867f0a8ec6..9502bc48a3e110f84e0ddf107a3a3181ec92c06d 100644 (file)
@@ -662,6 +662,7 @@ pub fn integer_lit(s: &str,
 #[cfg(test)]
 mod tests {
     use super::*;
+    use std::rc::Rc;
     use syntax_pos::{Span, BytePos, Pos, NO_EXPANSION};
     use codemap::Spanned;
     use ast::{self, PatKind};
@@ -763,7 +764,7 @@ fn string_to_tts_macro () {
                             )
                             if first_delimed.delim == token::Paren
                             && ident.name.as_str() == "a" => {},
-                            _ => panic!("value 3: {:?}", *first_delimed),
+                            _ => panic!("value 3: {:?}", **first_delimed),
                         }
                         let tts = &second_delimed.tts[..];
                         match (tts.len(), tts.get(0), tts.get(1)) {
@@ -774,10 +775,10 @@ fn string_to_tts_macro () {
                             )
                             if second_delimed.delim == token::Paren
                             && ident.name.as_str() == "a" => {},
-                            _ => panic!("value 4: {:?}", *second_delimed),
+                            _ => panic!("value 4: {:?}", **second_delimed),
                         }
                     },
-                    _ => panic!("value 2: {:?}", *macro_delimed),
+                    _ => panic!("value 2: {:?}", **macro_delimed),
                 }
             },
             _ => panic!("value: {:?}",tts),
@@ -793,7 +794,7 @@ fn string_to_tts_1() {
             TokenTree::Token(sp(3, 4), token::Ident(str_to_ident("a"))),
             TokenTree::Delimited(
                 sp(5, 14),
-                tokenstream::Delimited {
+                Rc::new(tokenstream::Delimited {
                     delim: token::DelimToken::Paren,
                     open_span: sp(5, 6),
                     tts: vec![
@@ -802,10 +803,10 @@ fn string_to_tts_1() {
                         TokenTree::Token(sp(10, 13), token::Ident(str_to_ident("i32"))),
                     ],
                     close_span: sp(13, 14),
-                }),
+                })),
             TokenTree::Delimited(
                 sp(15, 21),
-                tokenstream::Delimited {
+                Rc::new(tokenstream::Delimited {
                     delim: token::DelimToken::Brace,
                     open_span: sp(15, 16),
                     tts: vec![
@@ -813,7 +814,7 @@ fn string_to_tts_1() {
                         TokenTree::Token(sp(18, 19), token::Semi),
                     ],
                     close_span: sp(20, 21),
-                })
+                }))
         ];
 
         assert_eq!(tts, expected);
index a06270bb7727a4ac3c5927c0afdbbe25f55750e8..e4875b7c244fd9cb8f641fee92312b9954a0dc07 100644 (file)
@@ -495,64 +495,6 @@ fn tokens_to_string(tokens: &[TokenType]) -> String {
         }
     }
 
-    /// Check for erroneous `ident { }`; if matches, signal error and
-    /// recover (without consuming any expected input token).  Returns
-    /// true if and only if input was consumed for recovery.
-    pub fn check_for_erroneous_unit_struct_expecting(&mut self,
-                                                     expected: &[token::Token])
-                                                     -> bool {
-        if self.token == token::OpenDelim(token::Brace)
-            && expected.iter().all(|t| *t != token::OpenDelim(token::Brace))
-            && self.look_ahead(1, |t| *t == token::CloseDelim(token::Brace)) {
-            // matched; signal non-fatal error and recover.
-            let span = self.span;
-            self.span_err(span, "unit-like struct construction is written with no trailing `{ }`");
-            self.eat(&token::OpenDelim(token::Brace));
-            self.eat(&token::CloseDelim(token::Brace));
-            true
-        } else {
-            false
-        }
-    }
-
-    /// Commit to parsing a complete expression `e` expected to be
-    /// followed by some token from the set edible + inedible.  Recover
-    /// from anticipated input errors, discarding erroneous characters.
-    pub fn commit_expr(&mut self, e: &Expr, edible: &[token::Token],
-                       inedible: &[token::Token]) -> PResult<'a, ()> {
-        debug!("commit_expr {:?}", e);
-        if let ExprKind::Path(..) = e.node {
-            // might be unit-struct construction; check for recoverableinput error.
-            let expected = edible.iter()
-                .cloned()
-                .chain(inedible.iter().cloned())
-                .collect::<Vec<_>>();
-            self.check_for_erroneous_unit_struct_expecting(&expected[..]);
-        }
-        self.expect_one_of(edible, inedible)
-    }
-
-    pub fn commit_expr_expecting(&mut self, e: &Expr, edible: token::Token) -> PResult<'a, ()> {
-        self.commit_expr(e, &[edible], &[])
-    }
-
-    /// Commit to parsing a complete statement `s`, which expects to be
-    /// followed by some token from the set edible + inedible.  Check
-    /// for recoverable input errors, discarding erroneous characters.
-    pub fn commit_stmt(&mut self, edible: &[token::Token],
-                       inedible: &[token::Token]) -> PResult<'a, ()> {
-        if self.last_token
-               .as_ref()
-               .map_or(false, |t| t.is_ident() || t.is_path()) {
-            let expected = edible.iter()
-                .cloned()
-                .chain(inedible.iter().cloned())
-                .collect::<Vec<_>>();
-            self.check_for_erroneous_unit_struct_expecting(&expected);
-        }
-        self.expect_one_of(edible, inedible)
-    }
-
     /// returns the span of expr, if it was not interpolated or the span of the interpolated token
     fn interpolated_or_expr_span(&self,
                                  expr: PResult<'a, P<Expr>>)
@@ -1247,7 +1189,7 @@ pub fn parse_trait_item(&mut self) -> PResult<'a, TraitItem> {
             let default = if self.check(&token::Eq) {
                 self.bump();
                 let expr = self.parse_expr()?;
-                self.commit_expr_expecting(&expr, token::Semi)?;
+                self.expect(&token::Semi)?;
                 Some(expr)
             } else {
                 self.expect(&token::Semi)?;
@@ -2195,8 +2137,7 @@ fn parse_bottom_expr(&mut self) -> PResult<'a, P<Expr>> {
                 let mut trailing_comma = false;
                 while self.token != token::CloseDelim(token::Paren) {
                     es.push(self.parse_expr()?);
-                    self.commit_expr(&es.last().unwrap(), &[],
-                                     &[token::Comma, token::CloseDelim(token::Paren)])?;
+                    self.expect_one_of(&[], &[token::Comma, token::CloseDelim(token::Paren)])?;
                     if self.check(&token::Comma) {
                         trailing_comma = true;
 
@@ -2407,9 +2348,8 @@ fn parse_bottom_expr(&mut self) -> PResult<'a, P<Expr>> {
                                     }
                                 }
 
-                                match self.commit_expr(&fields.last().unwrap().expr,
-                                                       &[token::Comma],
-                                                       &[token::CloseDelim(token::Brace)]) {
+                                match self.expect_one_of(&[token::Comma],
+                                                         &[token::CloseDelim(token::Brace)]) {
                                     Ok(()) => {}
                                     Err(mut e) => {
                                         e.emit();
@@ -2662,7 +2602,7 @@ fn parse_dot_or_call_expr_with_(&mut self, e0: P<Expr>, lo: BytePos) -> PResult<
                 self.bump();
                 let ix = self.parse_expr()?;
                 hi = self.span.hi;
-                self.commit_expr_expecting(&ix, token::CloseDelim(token::Bracket))?;
+                self.expect(&token::CloseDelim(token::Bracket))?;
                 let index = self.mk_index(e, ix);
                 e = self.mk_expr(lo, hi, index, ThinVec::new())
               }
@@ -2688,12 +2628,13 @@ fn parse_unquoted(&mut self) -> PResult<'a, TokenTree> {
                     )?;
                     let (sep, repeat) = self.parse_sep_and_kleene_op()?;
                     let name_num = macro_parser::count_names(&seq);
-                    return Ok(TokenTree::Sequence(mk_sp(sp.lo, seq_span.hi), SequenceRepetition {
-                        tts: seq,
-                        separator: sep,
-                        op: repeat,
-                        num_captures: name_num
-                    }));
+                    return Ok(TokenTree::Sequence(mk_sp(sp.lo, seq_span.hi),
+                                      Rc::new(SequenceRepetition {
+                                          tts: seq,
+                                          separator: sep,
+                                          op: repeat,
+                                          num_captures: name_num
+                                      })));
                 } else if self.token.is_keyword(keywords::Crate) {
                     self.bump();
                     return Ok(TokenTree::Token(sp, SpecialVarNt(SpecialMacroVar::CrateMacroVar)));
@@ -2848,12 +2789,12 @@ pub fn parse_token_tree(&mut self) -> PResult<'a, TokenTree> {
                     _ => {}
                 }
 
-                Ok(TokenTree::Delimited(span, Delimited {
+                Ok(TokenTree::Delimited(span, Rc::new(Delimited {
                     delim: delim,
                     open_span: open_span,
                     tts: tts,
                     close_span: close_span,
-                }))
+                })))
             },
             _ => {
                 // invariants: the current token is not a left-delimiter,
@@ -3328,8 +3269,7 @@ fn parse_match_expr(&mut self, mut attrs: ThinVec<Attribute>) -> PResult<'a, P<E
         let lo = self.last_span.lo;
         let discriminant = self.parse_expr_res(Restrictions::RESTRICTION_NO_STRUCT_LITERAL,
                                                None)?;
-        if let Err(mut e) = self.commit_expr_expecting(&discriminant,
-                                                       token::OpenDelim(token::Brace)) {
+        if let Err(mut e) = self.expect(&token::OpenDelim(token::Brace)) {
             if self.token == token::Token::Semi {
                 e.span_note(match_span, "did you mean to remove this `match` keyword?");
             }
@@ -3375,7 +3315,7 @@ pub fn parse_arm(&mut self) -> PResult<'a, Arm> {
             && self.token != token::CloseDelim(token::Brace);
 
         if require_comma {
-            self.commit_expr(&expr, &[token::Comma], &[token::CloseDelim(token::Brace)])?;
+            self.expect_one_of(&[token::Comma], &[token::CloseDelim(token::Brace)])?;
         } else {
             self.eat(&token::Comma);
         }
@@ -4117,7 +4057,7 @@ fn parse_block_tail(&mut self, lo: BytePos, s: BlockCheckMode) -> PResult<'a, P<
                 _ => { // all other kinds of statements:
                     let mut hi = span.hi;
                     if classify::stmt_ends_with_semi(&node) {
-                        self.commit_stmt(&[token::Semi], &[])?;
+                        self.expect(&token::Semi)?;
                         hi = self.last_span.hi;
                     }
 
@@ -4195,7 +4135,7 @@ fn handle_expression_like_statement(&mut self,
         if classify::expr_requires_semi_to_be_stmt(&e) {
             // Just check for errors and recover; do not eat semicolon yet.
             if let Err(mut e) =
-                self.commit_stmt(&[], &[token::Semi, token::CloseDelim(token::Brace)])
+                self.expect_one_of(&[], &[token::Semi, token::CloseDelim(token::Brace)])
             {
                 e.emit();
                 self.recover_stmt();
@@ -4862,7 +4802,7 @@ pub fn parse_impl_item(&mut self) -> PResult<'a, ImplItem> {
             let typ = self.parse_ty_sum()?;
             self.expect(&token::Eq)?;
             let expr = self.parse_expr()?;
-            self.commit_expr_expecting(&expr, token::Semi)?;
+            self.expect(&token::Semi)?;
             (name, ast::ImplItemKind::Const(typ, expr))
         } else {
             let (name, inner_attrs, node) = self.parse_impl_method(&vis)?;
@@ -5286,7 +5226,7 @@ fn parse_item_const(&mut self, m: Option<Mutability>) -> PResult<'a, ItemInfo> {
         let ty = self.parse_ty_sum()?;
         self.expect(&token::Eq)?;
         let e = self.parse_expr()?;
-        self.commit_expr_expecting(&e, token::Semi)?;
+        self.expect(&token::Semi)?;
         let item = match m {
             Some(m) => ItemKind::Static(ty, m, e),
             None => ItemKind::Const(ty, e),
index 35377d14bab7cc0fe4618fb7be541311b1a987c9..0ad09fd0f7dfba492e8146a64a17ed0a25eb1f69 100644 (file)
@@ -8,18 +8,36 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-//! # Token Trees
-//! TokenTrees are syntactic forms for dealing with tokens. The description below is
-//! more complete; in short a TokenTree is a single token, a delimited sequence of token
-//! trees, or a sequence with repetition for list splicing as part of macro expansion.
+//! # Token Streams
+//!
+//! TokenStreams represent syntactic objects before they are converted into ASTs.
+//! A `TokenStream` is, roughly speaking, a sequence (e.g., a stream) of `TokenTree`s,
+//! which are themselves either a single Token, a Delimited subsequence of tokens,
+//! or a SequenceRepetition specifier (for the purpose of sequence generation during macro
+//! expansion).
+//!
+//! A TokenStream also has a slice view, `TokenSlice`, that is analogous to `str` for
+//! `String`: it allows the programmer to divvy up, explore, and otherwise partition a
+//! TokenStream as borrowed subsequences.
 
-use ast::{AttrStyle};
-use codemap::{Span};
+use ast::{self, AttrStyle, LitKind};
+use syntax_pos::{Span, DUMMY_SP, NO_EXPANSION};
+use codemap::Spanned;
 use ext::base;
 use ext::tt::macro_parser;
 use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
 use parse::lexer;
-use parse::token;
+use parse;
+use parse::token::{self, Token, Lit, InternedString, Nonterminal};
+use parse::token::Lit as TokLit;
+
+use std::fmt;
+use std::mem;
+use std::ops::Index;
+use std::ops;
+use std::iter::*;
+
+use std::rc::Rc;
 
 /// A delimited sequence of token trees
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
@@ -54,6 +72,11 @@ pub fn open_tt(&self) -> TokenTree {
     pub fn close_tt(&self) -> TokenTree {
         TokenTree::Token(self.close_span, self.close_token())
     }
+
+    /// Returns the token trees inside the delimiters.
+    pub fn subtrees(&self) -> &[TokenTree] {
+        &self.tts
+    }
 }
 
 /// A sequence of token trees
@@ -89,18 +112,16 @@ pub enum KleeneOp {
 ///
 /// The RHS of an MBE macro is the only place `SubstNt`s are substituted.
 /// Nothing special happens to misnamed or misplaced `SubstNt`s.
-#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
+#[derive(Debug, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)]
 pub enum TokenTree {
     /// A single token
     Token(Span, token::Token),
     /// A delimited sequence of token trees
-    Delimited(Span, Delimited),
+    Delimited(Span, Rc<Delimited>),
 
     // This only makes sense in MBE macros.
-
     /// A kleene-style repetition sequence with a span
-    // FIXME(eddyb) #12938 Use DST.
-    Sequence(Span, SequenceRepetition),
+    Sequence(Span, Rc<SequenceRepetition>),
 }
 
 impl TokenTree {
@@ -109,28 +130,22 @@ pub fn len(&self) -> usize {
             TokenTree::Token(_, token::DocComment(name)) => {
                 match doc_comment_style(&name.as_str()) {
                     AttrStyle::Outer => 2,
-                    AttrStyle::Inner => 3
+                    AttrStyle::Inner => 3,
                 }
             }
             TokenTree::Token(_, token::SpecialVarNt(..)) => 2,
             TokenTree::Token(_, token::MatchNt(..)) => 3,
-            TokenTree::Delimited(_, ref delimed) => {
-                delimed.tts.len() + 2
-            }
-            TokenTree::Sequence(_, ref seq) => {
-                seq.tts.len()
-            }
-            TokenTree::Token(..) => 0
+            TokenTree::Delimited(_, ref delimed) => delimed.tts.len() + 2,
+            TokenTree::Sequence(_, ref seq) => seq.tts.len(),
+            TokenTree::Token(..) => 0,
         }
     }
 
     pub fn get_tt(&self, index: usize) -> TokenTree {
         match (self, index) {
-            (&TokenTree::Token(sp, token::DocComment(_)), 0) => {
-                TokenTree::Token(sp, token::Pound)
-            }
+            (&TokenTree::Token(sp, token::DocComment(_)), 0) => TokenTree::Token(sp, token::Pound),
             (&TokenTree::Token(sp, token::DocComment(name)), 1)
-            if doc_comment_style(&name.as_str()) == AttrStyle::Inner => {
+                if doc_comment_style(&name.as_str()) == AttrStyle::Inner => {
                 TokenTree::Token(sp, token::Not)
             }
             (&TokenTree::Token(sp, token::DocComment(name)), _) => {
@@ -138,18 +153,21 @@ pub fn get_tt(&self, index: usize) -> TokenTree {
 
                 // Searches for the occurrences of `"#*` and returns the minimum number of `#`s
                 // required to wrap the text.
-                let num_of_hashes = stripped.chars().scan(0, |cnt, x| {
-                    *cnt = if x == '"' {
-                        1
-                    } else if *cnt != 0 && x == '#' {
-                        *cnt + 1
-                    } else {
-                        0
-                    };
-                    Some(*cnt)
-                }).max().unwrap_or(0);
+                let num_of_hashes = stripped.chars()
+                    .scan(0, |cnt, x| {
+                        *cnt = if x == '"' {
+                            1
+                        } else if *cnt != 0 && x == '#' {
+                            *cnt + 1
+                        } else {
+                            0
+                        };
+                        Some(*cnt)
+                    })
+                    .max()
+                    .unwrap_or(0);
 
-                TokenTree::Delimited(sp, Delimited {
+                TokenTree::Delimited(sp, Rc::new(Delimited {
                     delim: token::Bracket,
                     open_span: sp,
                     tts: vec![TokenTree::Token(sp, token::Ident(token::str_to_ident("doc"))),
@@ -157,7 +175,7 @@ pub fn get_tt(&self, index: usize) -> TokenTree {
                               TokenTree::Token(sp, token::Literal(
                                   token::StrRaw(token::intern(&stripped), num_of_hashes), None))],
                     close_span: sp,
-                })
+                }))
             }
             (&TokenTree::Delimited(_, ref delimed), _) => {
                 if index == 0 {
@@ -179,24 +197,24 @@ pub fn get_tt(&self, index: usize) -> TokenTree {
                          TokenTree::Token(sp, token::Ident(kind))];
                 v[index].clone()
             }
-            (&TokenTree::Sequence(_, ref seq), _) => {
-                seq.tts[index].clone()
-            }
-            _ => panic!("Cannot expand a token tree")
+            (&TokenTree::Sequence(_, ref seq), _) => seq.tts[index].clone(),
+            _ => panic!("Cannot expand a token tree"),
         }
     }
 
     /// Returns the `Span` corresponding to this token tree.
     pub fn get_span(&self) -> Span {
         match *self {
-            TokenTree::Token(span, _)     => span,
+            TokenTree::Token(span, _) => span,
             TokenTree::Delimited(span, _) => span,
-            TokenTree::Sequence(span, _)  => span,
+            TokenTree::Sequence(span, _) => span,
         }
     }
 
     /// Use this token tree as a matcher to parse given tts.
-    pub fn parse(cx: &base::ExtCtxt, mtch: &[TokenTree], tts: &[TokenTree])
+    pub fn parse(cx: &base::ExtCtxt,
+                 mtch: &[TokenTree],
+                 tts: &[TokenTree])
                  -> macro_parser::NamedParseResult {
         // `None` is because we're not interpolating
         let arg_rdr = lexer::new_tt_reader_with_doc_flag(&cx.parse_sess().span_diagnostic,
@@ -206,5 +224,1071 @@ pub fn parse(cx: &base::ExtCtxt, mtch: &[TokenTree], tts: &[TokenTree])
                                                          true);
         macro_parser::parse(cx.parse_sess(), cx.cfg(), arg_rdr, mtch)
     }
+
+    /// Check if this TokenTree is equal to the other, regardless of span information.
+    pub fn eq_unspanned(&self, other: &TokenTree) -> bool {
+        match (self, other) {
+            (&TokenTree::Token(_, ref tk), &TokenTree::Token(_, ref tk2)) => tk == tk2,
+            (&TokenTree::Delimited(_, ref dl), &TokenTree::Delimited(_, ref dl2)) => {
+                (*dl).delim == (*dl2).delim && dl.tts.len() == dl2.tts.len() &&
+                {
+                    for (tt1, tt2) in dl.tts.iter().zip(dl2.tts.iter()) {
+                        if !tt1.eq_unspanned(tt2) {
+                            return false;
+                        }
+                    }
+                    true
+                }
+            }
+            (_, _) => false,
+        }
+    }
+
+    /// Retrieve the TokenTree's span.
+    pub fn span(&self) -> Span {
+        match *self {
+            TokenTree::Token(sp, _) |
+            TokenTree::Delimited(sp, _) |
+            TokenTree::Sequence(sp, _) => sp,
+        }
+    }
+
+    /// Indicates if the stream is a token that is equal to the provided token.
+    pub fn eq_token(&self, t: Token) -> bool {
+        match *self {
+            TokenTree::Token(_, ref tk) => *tk == t,
+            _ => false,
+        }
+    }
+
+    /// Indicates if the token is an identifier.
+    pub fn is_ident(&self) -> bool {
+        self.maybe_ident().is_some()
+    }
+
+    /// Returns an identifier.
+    pub fn maybe_ident(&self) -> Option<ast::Ident> {
+        match *self {
+            TokenTree::Token(_, Token::Ident(t)) => Some(t.clone()),
+            TokenTree::Delimited(_, ref dl) => {
+                let tts = dl.subtrees();
+                if tts.len() != 1 {
+                    return None;
+                }
+                tts[0].maybe_ident()
+            }
+            _ => None,
+        }
+    }
+
+    /// Returns a Token literal.
+    pub fn maybe_lit(&self) -> Option<token::Lit> {
+        match *self {
+            TokenTree::Token(_, Token::Literal(l, _)) => Some(l.clone()),
+            TokenTree::Delimited(_, ref dl) => {
+                let tts = dl.subtrees();
+                if tts.len() != 1 {
+                    return None;
+                }
+                tts[0].maybe_lit()
+            }
+            _ => None,
+        }
+    }
+
+    /// Returns an AST string literal.
+    pub fn maybe_str(&self) -> Option<ast::Lit> {
+        match *self {
+            TokenTree::Token(sp, Token::Literal(Lit::Str_(s), _)) => {
+                let l = LitKind::Str(token::intern_and_get_ident(&parse::str_lit(&s.as_str())),
+                                     ast::StrStyle::Cooked);
+                Some(Spanned {
+                    node: l,
+                    span: sp,
+                })
+            }
+            TokenTree::Token(sp, Token::Literal(Lit::StrRaw(s, n), _)) => {
+                let l = LitKind::Str(token::intern_and_get_ident(&parse::raw_str_lit(&s.as_str())),
+                                     ast::StrStyle::Raw(n));
+                Some(Spanned {
+                    node: l,
+                    span: sp,
+                })
+            }
+            _ => None,
+        }
+    }
+}
+
+/// # Token Streams
+///
+/// TokenStreams are a syntactic abstraction over TokenTrees. The goal is for procedural
+/// macros to work over TokenStreams instead of arbitrary syntax. For now, however, we
+/// are going to cut a few corners (i.e., use some of the AST structure) when we need to
+/// for backwards compatibility.
+
+/// TokenStreams are collections of TokenTrees that represent a syntactic structure. The
+/// struct itself shouldn't be directly manipulated; the internal structure is not stable,
+/// and may be changed at any time in the future. The operators will not, however (except
+/// for signatures, later on).
+#[derive(Eq,Clone,Hash,RustcEncodable,RustcDecodable)]
+pub struct TokenStream {
+    pub span: Span,
+    pub tts: Vec<TokenTree>,
+}
+
+impl fmt::Debug for TokenStream {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        if self.tts.len() == 0 {
+            write!(f, "([empty")?;
+        } else {
+            write!(f, "([")?;
+            write!(f, "{:?}", self.tts[0])?;
+
+            for tt in self.tts.iter().skip(1) {
+                write!(f, ",{:?}", tt)?;
+            }
+        }
+        write!(f, "|")?;
+        self.span.fmt(f)?;
+        write!(f, "])")
+    }
+}
+
+/// Checks if two TokenStreams are equivalent (including spans). For unspanned
+/// equality, see `eq_unspanned`.
+impl PartialEq<TokenStream> for TokenStream {
+    fn eq(&self, other: &TokenStream) -> bool {
+        self.tts == other.tts
+    }
+}
+
+// NB this will disregard gaps. if we have [a|{2,5} , b|{11,13}], the resultant span
+// will be at {2,13}. Without finer-grained span structures, however, this seems to be
+// our only recourse.
+// FIXME Do something smarter to compute the expansion id.
+fn covering_span(trees: &[TokenTree]) -> Span {
+    // disregard any dummy spans we have
+    let trees = trees.iter().filter(|t| t.span() != DUMMY_SP).collect::<Vec<&TokenTree>>();
+
+    // if we're out of spans, stop
+    if trees.len() < 1 {
+        return DUMMY_SP;
+    }
+
+    // set up the initial values
+    let fst_span = trees[0].span();
+
+    let mut lo_span = fst_span.lo;
+    let mut hi_span = fst_span.hi;
+    let mut expn_id = fst_span.expn_id;
+
+    // compute the spans iteratively
+    for t in trees.iter().skip(1) {
+        let sp = t.span();
+        if sp.lo < lo_span {
+            lo_span = sp.lo;
+        }
+        if hi_span < sp.hi {
+            hi_span = sp.hi;
+        }
+        if expn_id != sp.expn_id {
+            expn_id = NO_EXPANSION;
+        }
+    }
+
+    Span {
+        lo: lo_span,
+        hi: hi_span,
+        expn_id: expn_id,
+    }
 }
 
+/// TokenStream operators include basic destructuring, boolean operations, `maybe_...`
+/// operations, and `maybe_..._prefix` operations. Boolean operations are straightforward,
+/// indicating information about the structure of the stream. The `maybe_...` operations
+/// return `Some<...>` if the tokenstream contains the appropriate item.
+///
+/// Similarly, the `maybe_..._prefix` operations potentially return a
+/// partially-destructured stream as a pair where the first element is the expected item
+/// and the second is the remainder of the stream. As an example,
+///
+///    `maybe_path_prefix("a::b::c(a,b,c).foo()") -> (a::b::c, "(a,b,c).foo()")`
+impl TokenStream {
+    /// Convert a vector of `TokenTree`s into a `TokenStream`.
+    pub fn from_tts(trees: Vec<TokenTree>) -> TokenStream {
+        let span = covering_span(&trees);
+        TokenStream {
+            tts: trees,
+            span: span,
+        }
+    }
+
+    /// Copies all of the TokenTrees from the TokenSlice, appending them to the stream.
+    pub fn append_stream(mut self, ts2: &TokenSlice) {
+        for tt in ts2.iter() {
+            self.tts.push(tt.clone());
+        }
+        self.span = covering_span(&self.tts[..]);
+    }
+
+    /// Manually change a TokenStream's span.
+    pub fn respan(self, span: Span) -> TokenStream {
+        TokenStream {
+            tts: self.tts,
+            span: span,
+        }
+    }
+
+    /// Construct a TokenStream from an ast literal.
+    pub fn from_ast_lit_str(lit: ast::Lit) -> Option<TokenStream> {
+        match lit.node {
+            LitKind::Str(val, _) => {
+                let val = TokLit::Str_(token::intern(&val));
+                Some(TokenStream::from_tts(vec![TokenTree::Token(lit.span,
+                                                                 Token::Literal(val, None))]))
+            }
+            _ => None,
+        }
+
+    }
+
+    /// Convert a vector of TokenTrees into a parentheses-delimited TokenStream.
+    pub fn as_paren_delimited_stream(tts: Vec<TokenTree>) -> TokenStream {
+        let new_sp = covering_span(&tts);
+
+        let new_delim = Rc::new(Delimited {
+            delim: token::DelimToken::Paren,
+            open_span: DUMMY_SP,
+            tts: tts,
+            close_span: DUMMY_SP,
+        });
+
+        TokenStream::from_tts(vec![TokenTree::Delimited(new_sp, new_delim)])
+    }
+
+    /// Convert an interned string into a one-element TokenStream.
+    pub fn from_interned_string_as_ident(s: InternedString) -> TokenStream {
+        TokenStream::from_tts(vec![TokenTree::Token(DUMMY_SP,
+                                                    Token::Ident(token::str_to_ident(&s[..])))])
+    }
+}
+
+/// TokenSlices are 'views' of `TokenStream's; they fit the same role as `str`s do for
+/// `String`s. In general, most TokenStream manipulations will be refocusing their internal
+/// contents by taking a TokenSlice and then using indexing and the provided operators.
+#[derive(PartialEq, Eq, Debug)]
+pub struct TokenSlice([TokenTree]);
+
+impl ops::Deref for TokenStream {
+    type Target = TokenSlice;
+
+    fn deref(&self) -> &TokenSlice {
+        let tts: &[TokenTree] = &*self.tts;
+        unsafe { mem::transmute(tts) }
+    }
+}
+
+impl TokenSlice {
+    /// Convert a borrowed TokenTree slice into a borrowed TokenSlice.
+    fn from_tts(tts: &[TokenTree]) -> &TokenSlice {
+        unsafe { mem::transmute(tts) }
+    }
+
+    /// Indicates whether the `TokenStream` is empty.
+    pub fn is_empty(&self) -> bool {
+        self.len() == 0
+    }
+
+    /// Return the `TokenSlice`'s length.
+    pub fn len(&self) -> usize {
+        self.0.len()
+    }
+
+    /// Check equality versus another TokenStream, ignoring span information.
+    pub fn eq_unspanned(&self, other: &TokenSlice) -> bool {
+        if self.len() != other.len() {
+            return false;
+        }
+        for (tt1, tt2) in self.iter().zip(other.iter()) {
+            if !tt1.eq_unspanned(tt2) {
+                return false;
+            }
+        }
+        true
+    }
+
+    /// Compute a span that covers the entire TokenSlice (eg, one wide enough to include
+    /// the entire slice). If the inputs share expansion identification, it is preserved.
+    /// If they do not, it is discarded.
+    pub fn covering_span(&self) -> Span {
+        covering_span(&self.0)
+    }
+
+    /// Indicates whether the stream is of the form `= <ts>`, where `<ts>` is a continued
+    /// `TokenStream`.
+    pub fn is_assignment(&self) -> bool {
+        self.maybe_assignment().is_some()
+    }
+
+    /// Returns the RHS of an assignment.
+    pub fn maybe_assignment(&self) -> Option<&TokenSlice> {
+        if !(self.len() > 1) {
+            return None;
+        }
+
+        Some(&self[1..])
+    }
+
+    /// Indicates whether the stream is a single, delimited expression (e.g., `(a,b,c)` or
+    /// `{a,b,c}`).
+    pub fn is_delimited(&self) -> bool {
+        self.maybe_delimited().is_some()
+    }
+
+    /// Returns the inside of the delimited term as a new TokenStream.
+    pub fn maybe_delimited(&self) -> Option<&TokenSlice> {
+        if !(self.len() == 1) {
+            return None;
+        }
+
+        match self[0] {
+            TokenTree::Delimited(_, ref rc) => Some(TokenSlice::from_tts(&*rc.tts)),
+            _ => None,
+        }
+    }
+
+    /// Returns a list of `TokenSlice`s if the stream is a delimited list, breaking the
+    /// stream on commas.
+    pub fn maybe_comma_list(&self) -> Option<Vec<&TokenSlice>> {
+        let maybe_tts = self.maybe_delimited();
+
+        let ts: &TokenSlice;
+        match maybe_tts {
+            Some(t) => {
+                ts = t;
+            }
+            None => {
+                return None;
+            }
+        }
+
+        let splits: Vec<&TokenSlice> = ts.split(|x| match *x {
+                TokenTree::Token(_, Token::Comma) => true,
+                _ => false,
+            })
+            .filter(|x| x.len() > 0)
+            .collect();
+
+        Some(splits)
+    }
+
+    /// Returns a Nonterminal if it is Interpolated.
+    pub fn maybe_interpolated_nonterminal(&self) -> Option<Nonterminal> {
+        if !(self.len() == 1) {
+            return None;
+        }
+
+        match self[0] {
+            TokenTree::Token(_, Token::Interpolated(ref nt)) => Some(nt.clone()),
+            _ => None,
+        }
+    }
+
+    /// Indicates if the stream is exactly one identifier.
+    pub fn is_ident(&self) -> bool {
+        self.maybe_ident().is_some()
+    }
+
+    /// Returns an identifier
+    pub fn maybe_ident(&self) -> Option<ast::Ident> {
+        if !(self.len() == 1) {
+            return None;
+        }
+
+        let tok = if let Some(tts) = self.maybe_delimited() {
+            if tts.len() != 1 {
+                return None;
+            }
+            &tts[0]
+        } else {
+            &self[0]
+        };
+
+        match *tok {
+            TokenTree::Token(_, Token::Ident(t)) => Some(t),
+            _ => None,
+        }
+    }
+
+    /// Indicates if the stream is exactly one literal
+    pub fn is_lit(&self) -> bool {
+        self.maybe_lit().is_some()
+    }
+
+    /// Returns a literal
+    pub fn maybe_lit(&self) -> Option<token::Lit> {
+        if !(self.len() == 1) {
+            return None;
+        }
+
+        let tok = if let Some(tts) = self.maybe_delimited() {
+            if tts.len() != 1 {
+                return None;
+            }
+            &tts[0]
+        } else {
+            &self[0]
+        };
+
+        match *tok {
+            TokenTree::Token(_, Token::Literal(l, _)) => Some(l),
+            _ => None,
+        }
+    }
+
+    /// Returns an AST string literal if the TokenStream is either a normal ('cooked') or
+    /// raw string literal.
+    pub fn maybe_str(&self) -> Option<ast::Lit> {
+        if !(self.len() == 1) {
+            return None;
+        }
+
+        match self[0] {
+            TokenTree::Token(sp, Token::Literal(Lit::Str_(s), _)) => {
+                let l = LitKind::Str(token::intern_and_get_ident(&parse::str_lit(&s.as_str())),
+                                     ast::StrStyle::Cooked);
+                Some(Spanned {
+                    node: l,
+                    span: sp,
+                })
+            }
+            TokenTree::Token(sp, Token::Literal(Lit::StrRaw(s, n), _)) => {
+                let l = LitKind::Str(token::intern_and_get_ident(&parse::raw_str_lit(&s.as_str())),
+                                     ast::StrStyle::Raw(n));
+                Some(Spanned {
+                    node: l,
+                    span: sp,
+                })
+            }
+            _ => None,
+        }
+    }
+
+    /// This operation extracts the path prefix, returning an AST path struct and the remainder
+    /// of the stream (if it finds one). To be more specific, a tokenstream that has a valid,
+    /// non-global path as a prefix (eg `foo(bar, baz)`, `foo::bar(bar)`, but *not*
+    /// `::foo::bar(baz)`) will yield the path and the remaining tokens (as a slice). The previous
+    /// examples will yield
+    /// `Some((Path { segments = vec![foo], ... }, [(bar, baz)]))`,
+    /// `Some((Path { segments = vec![foo, bar] }, [(baz)]))`,
+    /// and `None`, respectively.
+    pub fn maybe_path_prefix(&self) -> Option<(ast::Path, &TokenSlice)> {
+        let mut segments: Vec<ast::PathSegment> = Vec::new();
+
+        let path: Vec<&TokenTree> = self.iter()
+            .take_while(|x| x.is_ident() || x.eq_token(Token::ModSep))
+            .collect::<Vec<&TokenTree>>();
+
+        let path_size = path.len();
+        if path_size == 0 {
+            return None;
+        }
+
+        let cov_span = self[..path_size].covering_span();
+        let rst = &self[path_size..];
+
+        let fst_id = path[0];
+
+        if let Some(id) = fst_id.maybe_ident() {
+            segments.push(ast::PathSegment {
+                identifier: id,
+                parameters: ast::PathParameters::none(),
+            });
+        } else {
+            return None;
+        }
+
+        // Let's use a state machine to parse out the rest.
+        enum State {
+            Mod, // Expect a `::`, or return None otherwise.
+            Ident, // Expect an ident, or return None otherwise.
+        }
+        let mut state = State::Mod;
+
+        for p in &path[1..] {
+            match state {
+                State::Mod => {
+                    // State 0: ['::' -> state 1, else return None]
+                    if p.eq_token(Token::ModSep) {
+                        state = State::Ident;
+                    } else {
+                        return None;
+                    }
+                }
+                State::Ident => {
+                    // State 1: [ident -> state 0, else return None]
+                    if let Some(id) = p.maybe_ident() {
+                        segments.push(ast::PathSegment {
+                            identifier: id,
+                            parameters: ast::PathParameters::none(),
+                        });
+                        state = State::Mod;
+                    } else {
+                        return None;
+                    }
+                }
+            }
+        }
+
+        let path = ast::Path {
+            span: cov_span,
+            global: false,
+            segments: segments,
+        };
+        Some((path, rst))
+    }
+
+    /// Returns an iterator over a TokenSlice (as a sequence of TokenStreams).
+    fn iter(&self) -> Iter {
+        Iter { vs: self }
+    }
+
+    /// Splits a TokenSlice based on the provided `&TokenTree -> bool` predicate.
+    fn split<P>(&self, pred: P) -> Split<P>
+        where P: FnMut(&TokenTree) -> bool
+    {
+        Split {
+            vs: self,
+            pred: pred,
+            finished: false,
+        }
+    }
+}
+
+pub struct Iter<'a> {
+    vs: &'a TokenSlice,
+}
+
+impl<'a> Iterator for Iter<'a> {
+    type Item = &'a TokenTree;
+
+    fn next(&mut self) -> Option<&'a TokenTree> {
+        if self.vs.is_empty() {
+            return None;
+        }
+
+        let ret = Some(&self.vs[0]);
+        self.vs = &self.vs[1..];
+        ret
+    }
+}
+
+pub struct Split<'a, P>
+    where P: FnMut(&TokenTree) -> bool
+{
+    vs: &'a TokenSlice,
+    pred: P,
+    finished: bool,
+}
+
+impl<'a, P> Iterator for Split<'a, P>
+    where P: FnMut(&TokenTree) -> bool
+{
+    type Item = &'a TokenSlice;
+
+    fn next(&mut self) -> Option<&'a TokenSlice> {
+        if self.finished {
+            return None;
+        }
+
+        match self.vs.iter().position(|x| (self.pred)(x)) {
+            None => {
+                self.finished = true;
+                Some(&self.vs[..])
+            }
+            Some(idx) => {
+                let ret = Some(&self.vs[..idx]);
+                self.vs = &self.vs[idx + 1..];
+                ret
+            }
+        }
+    }
+}
+
+impl Index<usize> for TokenStream {
+    type Output = TokenTree;
+
+    fn index(&self, index: usize) -> &TokenTree {
+        Index::index(&**self, index)
+    }
+}
+
+impl ops::Index<ops::Range<usize>> for TokenStream {
+    type Output = TokenSlice;
+
+    fn index(&self, index: ops::Range<usize>) -> &TokenSlice {
+        Index::index(&**self, index)
+    }
+}
+
+impl ops::Index<ops::RangeTo<usize>> for TokenStream {
+    type Output = TokenSlice;
+
+    fn index(&self, index: ops::RangeTo<usize>) -> &TokenSlice {
+        Index::index(&**self, index)
+    }
+}
+
+impl ops::Index<ops::RangeFrom<usize>> for TokenStream {
+    type Output = TokenSlice;
+
+    fn index(&self, index: ops::RangeFrom<usize>) -> &TokenSlice {
+        Index::index(&**self, index)
+    }
+}
+
+impl ops::Index<ops::RangeFull> for TokenStream {
+    type Output = TokenSlice;
+
+    fn index(&self, _index: ops::RangeFull) -> &TokenSlice {
+        Index::index(&**self, _index)
+    }
+}
+
+impl Index<usize> for TokenSlice {
+    type Output = TokenTree;
+
+    fn index(&self, index: usize) -> &TokenTree {
+        &self.0[index]
+    }
+}
+
+impl ops::Index<ops::Range<usize>> for TokenSlice {
+    type Output = TokenSlice;
+
+    fn index(&self, index: ops::Range<usize>) -> &TokenSlice {
+        TokenSlice::from_tts(&self.0[index])
+    }
+}
+
+impl ops::Index<ops::RangeTo<usize>> for TokenSlice {
+    type Output = TokenSlice;
+
+    fn index(&self, index: ops::RangeTo<usize>) -> &TokenSlice {
+        TokenSlice::from_tts(&self.0[index])
+    }
+}
+
+impl ops::Index<ops::RangeFrom<usize>> for TokenSlice {
+    type Output = TokenSlice;
+
+    fn index(&self, index: ops::RangeFrom<usize>) -> &TokenSlice {
+        TokenSlice::from_tts(&self.0[index])
+    }
+}
+
+impl ops::Index<ops::RangeFull> for TokenSlice {
+    type Output = TokenSlice;
+
+    fn index(&self, _index: ops::RangeFull) -> &TokenSlice {
+        TokenSlice::from_tts(&self.0[_index])
+    }
+}
+
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use ast;
+    use syntax_pos::{Span, BytePos, NO_EXPANSION, DUMMY_SP};
+    use parse::token::{self, str_to_ident, Token, Lit};
+    use util::parser_testing::string_to_tts;
+    use std::rc::Rc;
+
+    fn sp(a: u32, b: u32) -> Span {
+        Span {
+            lo: BytePos(a),
+            hi: BytePos(b),
+            expn_id: NO_EXPANSION,
+        }
+    }
+
+    #[test]
+    fn test_is_empty() {
+        let test0 = TokenStream::from_tts(Vec::new());
+        let test1 = TokenStream::from_tts(vec![TokenTree::Token(sp(0, 1),
+                                                                Token::Ident(str_to_ident("a")))]);
+        let test2 = TokenStream::from_tts(string_to_tts("foo(bar::baz)".to_string()));
+
+        assert_eq!(test0.is_empty(), true);
+        assert_eq!(test1.is_empty(), false);
+        assert_eq!(test2.is_empty(), false);
+    }
+
+    #[test]
+    fn test_is_delimited() {
+        let test0 = TokenStream::from_tts(string_to_tts("foo(bar::baz)".to_string()));
+        let test1 = TokenStream::from_tts(string_to_tts("(bar::baz)".to_string()));
+        let test2 = TokenStream::from_tts(string_to_tts("(foo,bar,baz)".to_string()));
+        let test3 = TokenStream::from_tts(string_to_tts("(foo,bar,baz)(zab,rab,oof)".to_string()));
+        let test4 = TokenStream::from_tts(string_to_tts("(foo,bar,baz)foo".to_string()));
+        let test5 = TokenStream::from_tts(string_to_tts("".to_string()));
+
+        assert_eq!(test0.is_delimited(), false);
+        assert_eq!(test1.is_delimited(), true);
+        assert_eq!(test2.is_delimited(), true);
+        assert_eq!(test3.is_delimited(), false);
+        assert_eq!(test4.is_delimited(), false);
+        assert_eq!(test5.is_delimited(), false);
+    }
+
+    #[test]
+    fn test_is_assign() {
+        let test0 = TokenStream::from_tts(string_to_tts("= bar::baz".to_string()));
+        let test1 = TokenStream::from_tts(string_to_tts("= \"5\"".to_string()));
+        let test2 = TokenStream::from_tts(string_to_tts("= 5".to_string()));
+        let test3 = TokenStream::from_tts(string_to_tts("(foo = 10)".to_string()));
+        let test4 = TokenStream::from_tts(string_to_tts("= (foo,bar,baz)".to_string()));
+        let test5 = TokenStream::from_tts(string_to_tts("".to_string()));
+
+        assert_eq!(test0.is_assignment(), true);
+        assert_eq!(test1.is_assignment(), true);
+        assert_eq!(test2.is_assignment(), true);
+        assert_eq!(test3.is_assignment(), false);
+        assert_eq!(test4.is_assignment(), true);
+        assert_eq!(test5.is_assignment(), false);
+    }
+
+    #[test]
+    fn test_is_lit() {
+        let test0 = TokenStream::from_tts(string_to_tts("\"foo\"".to_string()));
+        let test1 = TokenStream::from_tts(string_to_tts("5".to_string()));
+        let test2 = TokenStream::from_tts(string_to_tts("foo".to_string()));
+        let test3 = TokenStream::from_tts(string_to_tts("foo::bar".to_string()));
+        let test4 = TokenStream::from_tts(string_to_tts("foo(bar)".to_string()));
+
+        assert_eq!(test0.is_lit(), true);
+        assert_eq!(test1.is_lit(), true);
+        assert_eq!(test2.is_lit(), false);
+        assert_eq!(test3.is_lit(), false);
+        assert_eq!(test4.is_lit(), false);
+    }
+
+    #[test]
+    fn test_is_ident() {
+        let test0 = TokenStream::from_tts(string_to_tts("\"foo\"".to_string()));
+        let test1 = TokenStream::from_tts(string_to_tts("5".to_string()));
+        let test2 = TokenStream::from_tts(string_to_tts("foo".to_string()));
+        let test3 = TokenStream::from_tts(string_to_tts("foo::bar".to_string()));
+        let test4 = TokenStream::from_tts(string_to_tts("foo(bar)".to_string()));
+
+        assert_eq!(test0.is_ident(), false);
+        assert_eq!(test1.is_ident(), false);
+        assert_eq!(test2.is_ident(), true);
+        assert_eq!(test3.is_ident(), false);
+        assert_eq!(test4.is_ident(), false);
+    }
+
+    #[test]
+    fn test_maybe_assignment() {
+        let test0_input = TokenStream::from_tts(string_to_tts("= bar::baz".to_string()));
+        let test1_input = TokenStream::from_tts(string_to_tts("= \"5\"".to_string()));
+        let test2_input = TokenStream::from_tts(string_to_tts("= 5".to_string()));
+        let test3_input = TokenStream::from_tts(string_to_tts("(foo = 10)".to_string()));
+        let test4_input = TokenStream::from_tts(string_to_tts("= (foo,bar,baz)".to_string()));
+        let test5_input = TokenStream::from_tts(string_to_tts("".to_string()));
+
+        let test0 = test0_input.maybe_assignment();
+        let test1 = test1_input.maybe_assignment();
+        let test2 = test2_input.maybe_assignment();
+        let test3 = test3_input.maybe_assignment();
+        let test4 = test4_input.maybe_assignment();
+        let test5 = test5_input.maybe_assignment();
+
+        let test0_expected = TokenStream::from_tts(vec![TokenTree::Token(sp(2, 5),
+                                                        token::Ident(str_to_ident("bar"))),
+                                       TokenTree::Token(sp(5, 7), token::ModSep),
+                                       TokenTree::Token(sp(7, 10),
+                                                        token::Ident(str_to_ident("baz")))]);
+        assert_eq!(test0, Some(&test0_expected[..]));
+
+        let test1_expected = TokenStream::from_tts(vec![TokenTree::Token(sp(2, 5),
+                                            token::Literal(Lit::Str_(token::intern("5")), None))]);
+        assert_eq!(test1, Some(&test1_expected[..]));
+
+        let test2_expected = TokenStream::from_tts(vec![TokenTree::Token( sp(2,3)
+                                       , token::Literal(
+                                           Lit::Integer(
+                                             token::intern(&(5.to_string()))),
+                                             None))]);
+        assert_eq!(test2, Some(&test2_expected[..]));
+
+        assert_eq!(test3, None);
+
+
+        let test4_tts = vec![TokenTree::Token(sp(3, 6), token::Ident(str_to_ident("foo"))),
+                             TokenTree::Token(sp(6, 7), token::Comma),
+                             TokenTree::Token(sp(7, 10), token::Ident(str_to_ident("bar"))),
+                             TokenTree::Token(sp(10, 11), token::Comma),
+                             TokenTree::Token(sp(11, 14), token::Ident(str_to_ident("baz")))];
+
+        let test4_expected = TokenStream::from_tts(vec![TokenTree::Delimited(sp(2, 15),
+                                                Rc::new(Delimited {
+                                                    delim: token::DelimToken::Paren,
+                                                    open_span: sp(2, 3),
+                                                    tts: test4_tts,
+                                                    close_span: sp(14, 15),
+                                                }))]);
+        assert_eq!(test4, Some(&test4_expected[..]));
+
+        assert_eq!(test5, None);
+
+    }
+
+    #[test]
+    fn test_maybe_delimited() {
+        let test0_input = TokenStream::from_tts(string_to_tts("foo(bar::baz)".to_string()));
+        let test1_input = TokenStream::from_tts(string_to_tts("(bar::baz)".to_string()));
+        let test2_input = TokenStream::from_tts(string_to_tts("(foo,bar,baz)".to_string()));
+        let test3_input = TokenStream::from_tts(string_to_tts("(foo,bar,baz)(zab,rab)"
+            .to_string()));
+        let test4_input = TokenStream::from_tts(string_to_tts("(foo,bar,baz)foo".to_string()));
+        let test5_input = TokenStream::from_tts(string_to_tts("".to_string()));
+
+        let test0 = test0_input.maybe_delimited();
+        let test1 = test1_input.maybe_delimited();
+        let test2 = test2_input.maybe_delimited();
+        let test3 = test3_input.maybe_delimited();
+        let test4 = test4_input.maybe_delimited();
+        let test5 = test5_input.maybe_delimited();
+
+        assert_eq!(test0, None);
+
+        let test1_expected = TokenStream::from_tts(vec![TokenTree::Token(sp(1, 4),
+                                                        token::Ident(str_to_ident("bar"))),
+                                       TokenTree::Token(sp(4, 6), token::ModSep),
+                                       TokenTree::Token(sp(6, 9),
+                                                        token::Ident(str_to_ident("baz")))]);
+        assert_eq!(test1, Some(&test1_expected[..]));
+
+        let test2_expected = TokenStream::from_tts(vec![TokenTree::Token(sp(1, 4),
+                                                        token::Ident(str_to_ident("foo"))),
+                                       TokenTree::Token(sp(4, 5), token::Comma),
+                                       TokenTree::Token(sp(5, 8),
+                                                        token::Ident(str_to_ident("bar"))),
+                                       TokenTree::Token(sp(8, 9), token::Comma),
+                                       TokenTree::Token(sp(9, 12),
+                                                        token::Ident(str_to_ident("baz")))]);
+        assert_eq!(test2, Some(&test2_expected[..]));
+
+        assert_eq!(test3, None);
+
+        assert_eq!(test4, None);
+
+        assert_eq!(test5, None);
+    }
+
+    #[test]
+    fn test_maybe_comma_list() {
+        let test0_input = TokenStream::from_tts(string_to_tts("foo(bar::baz)".to_string()));
+        let test1_input = TokenStream::from_tts(string_to_tts("(bar::baz)".to_string()));
+        let test2_input = TokenStream::from_tts(string_to_tts("(foo,bar,baz)".to_string()));
+        let test3_input = TokenStream::from_tts(string_to_tts("(foo::bar,bar,baz)".to_string()));
+        let test4_input = TokenStream::from_tts(string_to_tts("(foo,bar,baz)(zab,rab)"
+            .to_string()));
+        let test5_input = TokenStream::from_tts(string_to_tts("(foo,bar,baz)foo".to_string()));
+        let test6_input = TokenStream::from_tts(string_to_tts("".to_string()));
+        // The following is supported behavior!
+        let test7_input = TokenStream::from_tts(string_to_tts("(foo,bar,)".to_string()));
+
+        let test0 = test0_input.maybe_comma_list();
+        let test1 = test1_input.maybe_comma_list();
+        let test2 = test2_input.maybe_comma_list();
+        let test3 = test3_input.maybe_comma_list();
+        let test4 = test4_input.maybe_comma_list();
+        let test5 = test5_input.maybe_comma_list();
+        let test6 = test6_input.maybe_comma_list();
+        let test7 = test7_input.maybe_comma_list();
+
+        assert_eq!(test0, None);
+
+        let test1_stream = TokenStream::from_tts(vec![TokenTree::Token(sp(1, 4),
+                                                        token::Ident(str_to_ident("bar"))),
+                                       TokenTree::Token(sp(4, 6), token::ModSep),
+                                       TokenTree::Token(sp(6, 9),
+                                                        token::Ident(str_to_ident("baz")))]);
+
+        let test1_expected: Vec<&TokenSlice> = vec![&test1_stream[..]];
+        assert_eq!(test1, Some(test1_expected));
+
+        let test2_foo = TokenStream::from_tts(vec![TokenTree::Token(sp(1, 4),
+                                                        token::Ident(str_to_ident("foo")))]);
+        let test2_bar = TokenStream::from_tts(vec![TokenTree::Token(sp(5, 8),
+                                                        token::Ident(str_to_ident("bar")))]);
+        let test2_baz = TokenStream::from_tts(vec![TokenTree::Token(sp(9, 12),
+                                                        token::Ident(str_to_ident("baz")))]);
+        let test2_expected: Vec<&TokenSlice> = vec![&test2_foo[..], &test2_bar[..], &test2_baz[..]];
+        assert_eq!(test2, Some(test2_expected));
+
+        let test3_path = TokenStream::from_tts(vec![TokenTree::Token(sp(1, 4),
+                                                        token::Ident(str_to_ident("foo"))),
+                                       TokenTree::Token(sp(4, 6), token::ModSep),
+                                       TokenTree::Token(sp(6, 9),
+                                                        token::Ident(str_to_ident("bar")))]);
+        let test3_bar = TokenStream::from_tts(vec![TokenTree::Token(sp(10, 13),
+                                                        token::Ident(str_to_ident("bar")))]);
+        let test3_baz = TokenStream::from_tts(vec![TokenTree::Token(sp(14, 17),
+                                                        token::Ident(str_to_ident("baz")))]);
+        let test3_expected: Vec<&TokenSlice> =
+            vec![&test3_path[..], &test3_bar[..], &test3_baz[..]];
+        assert_eq!(test3, Some(test3_expected));
+
+        assert_eq!(test4, None);
+
+        assert_eq!(test5, None);
+
+        assert_eq!(test6, None);
+
+
+        let test7_expected: Vec<&TokenSlice> = vec![&test2_foo[..], &test2_bar[..]];
+        assert_eq!(test7, Some(test7_expected));
+    }
+
+    // pub fn maybe_ident(&self) -> Option<ast::Ident>
+    #[test]
+    fn test_maybe_ident() {
+        let test0 = TokenStream::from_tts(string_to_tts("\"foo\"".to_string())).maybe_ident();
+        let test1 = TokenStream::from_tts(string_to_tts("5".to_string())).maybe_ident();
+        let test2 = TokenStream::from_tts(string_to_tts("foo".to_string())).maybe_ident();
+        let test3 = TokenStream::from_tts(string_to_tts("foo::bar".to_string())).maybe_ident();
+        let test4 = TokenStream::from_tts(string_to_tts("foo(bar)".to_string())).maybe_ident();
+
+        assert_eq!(test0, None);
+        assert_eq!(test1, None);
+        assert_eq!(test2, Some(str_to_ident("foo")));
+        assert_eq!(test3, None);
+        assert_eq!(test4, None);
+    }
+
+    // pub fn maybe_lit(&self) -> Option<token::Lit>
+    #[test]
+    fn test_maybe_lit() {
+        let test0 = TokenStream::from_tts(string_to_tts("\"foo\"".to_string())).maybe_lit();
+        let test1 = TokenStream::from_tts(string_to_tts("5".to_string())).maybe_lit();
+        let test2 = TokenStream::from_tts(string_to_tts("foo".to_string())).maybe_lit();
+        let test3 = TokenStream::from_tts(string_to_tts("foo::bar".to_string())).maybe_lit();
+        let test4 = TokenStream::from_tts(string_to_tts("foo(bar)".to_string())).maybe_lit();
+
+        assert_eq!(test0, Some(Lit::Str_(token::intern("foo"))));
+        assert_eq!(test1, Some(Lit::Integer(token::intern(&(5.to_string())))));
+        assert_eq!(test2, None);
+        assert_eq!(test3, None);
+        assert_eq!(test4, None);
+    }
+
+    #[test]
+    fn test_maybe_path_prefix() {
+        let test0_input = TokenStream::from_tts(string_to_tts("foo(bar::baz)".to_string()));
+        let test1_input = TokenStream::from_tts(string_to_tts("(bar::baz)".to_string()));
+        let test2_input = TokenStream::from_tts(string_to_tts("(foo,bar,baz)".to_string()));
+        let test3_input = TokenStream::from_tts(string_to_tts("foo::bar(bar,baz)".to_string()));
+
+        let test0 = test0_input.maybe_path_prefix();
+        let test1 = test1_input.maybe_path_prefix();
+        let test2 = test2_input.maybe_path_prefix();
+        let test3 = test3_input.maybe_path_prefix();
+
+        let test0_tts = vec![TokenTree::Token(sp(4, 7), token::Ident(str_to_ident("bar"))),
+                             TokenTree::Token(sp(7, 9), token::ModSep),
+                             TokenTree::Token(sp(9, 12), token::Ident(str_to_ident("baz")))];
+
+        let test0_stream = TokenStream::from_tts(vec![TokenTree::Delimited(sp(3, 13),
+                                                               Rc::new(Delimited {
+                                                                   delim: token::DelimToken::Paren,
+                                                                   open_span: sp(3, 4),
+                                                                   tts: test0_tts,
+                                                                   close_span: sp(12, 13),
+                                                               }))]);
+
+        let test0_expected = Some((ast::Path::from_ident(sp(0, 3), str_to_ident("foo")),
+                                   &test0_stream[..]));
+        assert_eq!(test0, test0_expected);
+
+        assert_eq!(test1, None);
+        assert_eq!(test2, None);
+
+        let test3_path = ast::Path {
+            span: sp(0, 8),
+            global: false,
+            segments: vec![ast::PathSegment {
+                               identifier: str_to_ident("foo"),
+                               parameters: ast::PathParameters::none(),
+                           },
+                           ast::PathSegment {
+                               identifier: str_to_ident("bar"),
+                               parameters: ast::PathParameters::none(),
+                           }],
+        };
+
+        let test3_tts = vec![TokenTree::Token(sp(9, 12), token::Ident(str_to_ident("bar"))),
+                             TokenTree::Token(sp(12, 13), token::Comma),
+                             TokenTree::Token(sp(13, 16), token::Ident(str_to_ident("baz")))];
+
+        let test3_stream = TokenStream::from_tts(vec![TokenTree::Delimited(sp(8, 17),
+                                                               Rc::new(Delimited {
+                                                                   delim: token::DelimToken::Paren,
+                                                                   open_span: sp(8, 9),
+                                                                   tts: test3_tts,
+                                                                   close_span: sp(16, 17),
+                                                               }))]);
+        let test3_expected = Some((test3_path, &test3_stream[..]));
+        assert_eq!(test3, test3_expected);
+    }
+
+    #[test]
+    fn test_as_paren_delimited_stream() {
+        let test0 = TokenStream::as_paren_delimited_stream(string_to_tts("foo,bar,".to_string()));
+        let test1 = TokenStream::as_paren_delimited_stream(string_to_tts("baz(foo,bar)"
+            .to_string()));
+
+        let test0_tts = vec![TokenTree::Token(sp(0, 3), token::Ident(str_to_ident("foo"))),
+                             TokenTree::Token(sp(3, 4), token::Comma),
+                             TokenTree::Token(sp(4, 7), token::Ident(str_to_ident("bar"))),
+                             TokenTree::Token(sp(7, 8), token::Comma)];
+        let test0_stream = TokenStream::from_tts(vec![TokenTree::Delimited(sp(0, 8),
+                                                               Rc::new(Delimited {
+                                                                   delim: token::DelimToken::Paren,
+                                                                   open_span: DUMMY_SP,
+                                                                   tts: test0_tts,
+                                                                   close_span: DUMMY_SP,
+                                                               }))]);
+
+        assert_eq!(test0, test0_stream);
+
+
+        let test1_tts = vec![TokenTree::Token(sp(4, 7), token::Ident(str_to_ident("foo"))),
+                             TokenTree::Token(sp(7, 8), token::Comma),
+                             TokenTree::Token(sp(8, 11), token::Ident(str_to_ident("bar")))];
+
+        let test1_parse = vec![TokenTree::Token(sp(0, 3), token::Ident(str_to_ident("baz"))),
+                               TokenTree::Delimited(sp(3, 12),
+                                                    Rc::new(Delimited {
+                                                        delim: token::DelimToken::Paren,
+                                                        open_span: sp(3, 4),
+                                                        tts: test1_tts,
+                                                        close_span: sp(11, 12),
+                                                    }))];
+
+        let test1_stream = TokenStream::from_tts(vec![TokenTree::Delimited(sp(0, 12),
+                                                               Rc::new(Delimited {
+                                                                   delim: token::DelimToken::Paren,
+                                                                   open_span: DUMMY_SP,
+                                                                   tts: test1_parse,
+                                                                   close_span: DUMMY_SP,
+                                                               }))]);
+
+        assert_eq!(test1, test1_stream);
+    }
+
+}
index 56a8c28ffedc2fe6547bca57842d320da96d388a..9cf456062385fcce660b6595b30e20594eaa5e20 100644 (file)
@@ -192,7 +192,10 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[tokenstream::Token
 
                     if OPTIONS.iter().any(|&opt| s == opt) {
                         cx.span_warn(p.last_span, "expected a clobber, found an option");
+                    } else if s.starts_with("{") || s.ends_with("}") {
+                        cx.span_err(p.last_span, "clobber should not be surrounded by braces");
                     }
+
                     clobs.push(s);
                 }
             }
diff --git a/src/test/compile-fail/asm-bad-clobber.rs b/src/test/compile-fail/asm-bad-clobber.rs
new file mode 100644 (file)
index 0000000..714343a
--- /dev/null
@@ -0,0 +1,26 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// ignore-android
+// ignore-arm
+// ignore-aarch64
+
+#![feature(asm, rustc_attrs)]
+
+#[cfg(any(target_arch = "x86",
+          target_arch = "x86_64"))]
+#[rustc_error]
+pub fn main() {
+    unsafe {
+        // clobber formatted as register input/output
+        asm!("xor %eax, %eax" : : : "{eax}");
+        //~^ ERROR clobber should not be surrounded by braces
+    }
+}
index 11d81eda55625960ae69fcca3483de8072e7de35..3f50811f826e07e83a37bd50d8b0fbfac40fc665 100644 (file)
@@ -72,6 +72,7 @@ fn expand_into_foo_multi(cx: &mut ExtCtxt,
                 ..(*quote_item!(cx, enum Foo2 { Bar2, Baz2 }).unwrap()).clone()
             })),
             Annotatable::Item(quote_item!(cx, enum Foo3 { Bar }).unwrap()),
+            Annotatable::Item(quote_item!(cx, #[cfg(any())] fn foo2() {}).unwrap()),
         ],
         Annotatable::ImplItem(it) => vec![
             quote_item!(cx, impl X { fn foo(&self) -> i32 { 42 } }).unwrap().and_then(|i| {
index d17adff007c6335b440c4c42063a753f99f2c91c..fe2317aabea68b43f60c53e0c1e81932b562fdf6 100644 (file)
@@ -21,6 +21,9 @@
 #[derive(PartialEq, Clone, Debug)]
 fn foo() -> AnotherFakeTypeThatHadBetterGoAway {}
 
+// Check that the `#[into_multi_foo]`-generated `foo2` is configured away
+fn foo2() {}
+
 trait Qux {
     #[into_multi_foo]
     fn bar();
diff --git a/src/test/run-pass/auxiliary/xcrate_generic_fn_nested_return.rs b/src/test/run-pass/auxiliary/xcrate_generic_fn_nested_return.rs
new file mode 100644 (file)
index 0000000..48fb05f
--- /dev/null
@@ -0,0 +1,26 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+pub struct Request {
+    pub id: String,
+    pub arg: String,
+}
+
+pub fn decode<T>() -> Result<Request, ()> {
+    (|| {
+        Ok(Request {
+            id: "hi".to_owned(),
+            arg: match Err(()) {
+                Ok(v) => v,
+                Err(e) => return Err(e)
+            },
+        })
+    })()
+}
diff --git a/src/test/run-pass/issue-27021.rs b/src/test/run-pass/issue-27021.rs
new file mode 100644 (file)
index 0000000..eb7d529
--- /dev/null
@@ -0,0 +1,21 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn main() {
+    let mut c = (1, (1, "".to_owned()));
+    match c {
+        c2 => { (c.1).0 = 2; assert_eq!((c2.1).0, 1); }
+    }
+
+    let mut c = (1, (1, (1, "".to_owned())));
+    match c.1 {
+        c2 => { ((c.1).1).0 = 3; assert_eq!((c2.1).0, 1); }
+    }
+}
diff --git a/src/test/run-pass/xcrate_generic_fn_nested_return.rs b/src/test/run-pass/xcrate_generic_fn_nested_return.rs
new file mode 100644 (file)
index 0000000..181c916
--- /dev/null
@@ -0,0 +1,17 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// aux-build:xcrate_generic_fn_nested_return.rs
+
+extern crate xcrate_generic_fn_nested_return as test;
+
+pub fn main() {
+    assert!(test::decode::<()>().is_err());
+}