]> git.lizzy.rs Git - rust.git/commitdiff
Auto merge of #34690 - alexcrichton:clarify-vcvars, r=brson
author	bors <bors@rust-lang.org>
Fri, 8 Jul 2016 22:00:09 +0000 (15:00 -0700)
committer	GitHub <noreply@github.com>
Fri, 8 Jul 2016 22:00:09 +0000 (15:00 -0700)
Clarify rustbuild + msvc + vcvars in README

The invocation of vcvars is only needed for versions of Visual Studio that
rustbuild or cmake don't understand; if older versions are installed then
there's no need to call vcvars.

Closes #34576

140 files changed:
RELEASES.md
mk/main.mk
src/bootstrap/Cargo.toml
src/bootstrap/bin/main.rs [new file with mode: 0644]
src/bootstrap/bin/rustc.rs [new file with mode: 0644]
src/bootstrap/bin/rustdoc.rs [new file with mode: 0644]
src/bootstrap/bootstrap.py
src/bootstrap/build/cc.rs [deleted file]
src/bootstrap/build/channel.rs [deleted file]
src/bootstrap/build/check.rs [deleted file]
src/bootstrap/build/clean.rs [deleted file]
src/bootstrap/build/compile.rs [deleted file]
src/bootstrap/build/config.rs [deleted file]
src/bootstrap/build/dist.rs [deleted file]
src/bootstrap/build/doc.rs [deleted file]
src/bootstrap/build/flags.rs [deleted file]
src/bootstrap/build/job.rs [deleted file]
src/bootstrap/build/mod.rs [deleted file]
src/bootstrap/build/native.rs [deleted file]
src/bootstrap/build/sanity.rs [deleted file]
src/bootstrap/build/step.rs [deleted file]
src/bootstrap/build/util.rs [deleted file]
src/bootstrap/cc.rs [new file with mode: 0644]
src/bootstrap/channel.rs [new file with mode: 0644]
src/bootstrap/check.rs [new file with mode: 0644]
src/bootstrap/clean.rs [new file with mode: 0644]
src/bootstrap/compile.rs [new file with mode: 0644]
src/bootstrap/config.rs [new file with mode: 0644]
src/bootstrap/dist.rs [new file with mode: 0644]
src/bootstrap/doc.rs [new file with mode: 0644]
src/bootstrap/flags.rs [new file with mode: 0644]
src/bootstrap/job.rs [new file with mode: 0644]
src/bootstrap/lib.rs
src/bootstrap/main.rs [deleted file]
src/bootstrap/native.rs [new file with mode: 0644]
src/bootstrap/rustc.rs [deleted file]
src/bootstrap/rustdoc.rs [deleted file]
src/bootstrap/sanity.rs [new file with mode: 0644]
src/bootstrap/step.rs [new file with mode: 0644]
src/bootstrap/util.rs [new file with mode: 0644]
src/doc/book/closures.md
src/doc/book/conditional-compilation.md
src/doc/book/documentation.md
src/doc/book/ffi.md
src/doc/book/getting-started.md
src/doc/book/guessing-game.md
src/doc/book/inline-assembly.md
src/doc/book/loops.md
src/doc/book/mutability.md
src/doc/book/raw-pointers.md
src/doc/book/strings.md
src/doc/book/structs.md
src/doc/book/testing.md
src/doc/book/unsafe.md
src/etc/gdb_rust_pretty_printing.py
src/etc/get-stage0.py
src/liballoc/rc.rs
src/libcore/intrinsics.rs
src/libcore/iter/mod.rs
src/libcore/iter/traits.rs
src/libcore/num/f32.rs
src/libcore/num/f64.rs
src/libcore/num/int_macros.rs
src/libcore/num/mod.rs
src/libcore/num/uint_macros.rs
src/librustc/hir/mod.rs
src/librustc/session/config.rs
src/librustc_driver/driver.rs
src/librustc_resolve/lib.rs
src/librustc_save_analysis/dump_visitor.rs
src/librustc_trans/_match.rs
src/librustc_trans/abi.rs
src/librustc_trans/back/link.rs
src/librustc_trans/back/lto.rs
src/librustc_trans/back/symbol_names.rs
src/librustc_trans/back/write.rs
src/librustc_trans/base.rs
src/librustc_trans/callee.rs
src/librustc_trans/closure.rs
src/librustc_trans/collector.rs
src/librustc_trans/consts.rs
src/librustc_trans/context.rs
src/librustc_trans/declare.rs
src/librustc_trans/diagnostics.rs
src/librustc_trans/expr.rs
src/librustc_trans/glue.rs
src/librustc_trans/inline.rs
src/librustc_trans/intrinsic.rs
src/librustc_trans/lib.rs
src/librustc_trans/mir/operand.rs
src/librustc_trans/monomorphize.rs
src/librustc_trans/partitioning.rs
src/librustc_trans/symbol_map.rs [new file with mode: 0644]
src/librustc_trans/symbol_names_test.rs
src/librustc_trans/trans_item.rs
src/librustc_typeck/check/intrinsic.rs
src/librustdoc/clean/mod.rs
src/librustdoc/html/highlight.rs
src/librustdoc/html/render.rs
src/librustdoc/html/static/rustdoc.css
src/libstd/io/error.rs
src/libstd/memchr.rs
src/libstd/path.rs
src/libstd/sys/common/net.rs
src/libsyntax/config.rs
src/libsyntax/ext/expand.rs
src/libsyntax/ext/quote.rs
src/libsyntax/ext/tt/macro_rules.rs
src/libsyntax/ext/tt/transcribe.rs
src/libsyntax/fold.rs
src/libsyntax/parse/mod.rs
src/libsyntax/parse/parser.rs
src/libsyntax/tokenstream.rs
src/libsyntax_ext/asm.rs
src/llvm
src/rustllvm/llvm-auto-clean-trigger
src/test/codegen-units/item-collection/cross-crate-closures.rs
src/test/codegen-units/item-collection/non-generic-closures.rs
src/test/codegen-units/partitioning/extern-drop-glue.rs
src/test/codegen-units/partitioning/extern-generic.rs
src/test/codegen-units/partitioning/inlining-from-extern-crate.rs
src/test/codegen-units/partitioning/local-drop-glue.rs
src/test/codegen-units/partitioning/local-generic.rs
src/test/codegen-units/partitioning/local-inlining.rs
src/test/codegen-units/partitioning/local-transitive-inlining.rs
src/test/codegen-units/partitioning/methods-are-with-self-type.rs
src/test/codegen-units/partitioning/regular-modules.rs
src/test/codegen-units/partitioning/statics.rs
src/test/codegen/drop.rs
src/test/compile-fail/asm-bad-clobber.rs [new file with mode: 0644]
src/test/compile-fail/cfg_attr_path.rs [new file with mode: 0644]
src/test/compile-fail/intrinsic-return-address.rs [deleted file]
src/test/run-pass-fulldeps/auxiliary/macro_crate_test.rs
src/test/run-pass-fulldeps/macro-crate.rs
src/test/run-pass/auxiliary/xcrate_generic_fn_nested_return.rs [new file with mode: 0644]
src/test/run-pass/intrinsic-return-address.rs [deleted file]
src/test/run-pass/issue-27021.rs [new file with mode: 0644]
src/test/run-pass/mir_trans_calls.rs
src/test/run-pass/xcrate_generic_fn_nested_return.rs [new file with mode: 0644]
src/test/rustdoc/prim-title.rs [new file with mode: 0644]

index ffe8d64ff2da4c71b64338b88b460a5946f63b05..c798c56cd6d03f810d74976a66d2ab53b116c11c 100644 (file)
@@ -172,7 +172,7 @@ Libraries
   (https://github.com/rust-lang/rust/pull/33050).
 * [Implement `Display` and `Hash` for `std::num::Wrapping`]
   (https://github.com/rust-lang/rust/pull/33023).
-* [Add `Default` implementation for `&CStr`, `CString`, `Path`]
+* [Add `Default` implementation for `&CStr`, `CString`]
   (https://github.com/rust-lang/rust/pull/32990).
 * [Implement `From<Vec<T>>` and `Into<Vec<T>>` for `VecDeque<T>`]
   (https://github.com/rust-lang/rust/pull/32866).
index daf656f89c1a5a79bf621baf11b6bc972ae2a193..4c72597f0c5c1d2a2192a24f9d2337a36ae46be9 100644 (file)
@@ -13,7 +13,7 @@
 ######################################################################
 
 # The version number
-CFG_RELEASE_NUM=1.11.0
+CFG_RELEASE_NUM=1.12.0
 
 # An optional number to put after the label, e.g. '.2' -> '-beta.2'
 # NB Make sure it starts with a dot to conform to semver pre-release
index f9a64567ffde0e241a5f8c4d90aaae06e5f5b673..cde4a825be1fb81259d354b3cd266bfc47ab84d9 100644 (file)
@@ -9,15 +9,15 @@ path = "lib.rs"
 
 [[bin]]
 name = "bootstrap"
-path = "main.rs"
+path = "bin/main.rs"
 
 [[bin]]
 name = "rustc"
-path = "rustc.rs"
+path = "bin/rustc.rs"
 
 [[bin]]
 name = "rustdoc"
-path = "rustdoc.rs"
+path = "bin/rustdoc.rs"
 
 [dependencies]
 build_helper = { path = "../build_helper" }
diff --git a/src/bootstrap/bin/main.rs b/src/bootstrap/bin/main.rs
new file mode 100644 (file)
index 0000000..c47f4fd
--- /dev/null
@@ -0,0 +1,37 @@
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! rustbuild, the Rust build system
+//!
+//! This is the entry point for the build system used to compile the `rustc`
+//! compiler. Lots of documentation can be found in the `README.md` file next to
+//! this file, and otherwise documentation can be found throughout the `build`
+//! directory in each respective module.
+
+#![deny(warnings)]
+
+extern crate bootstrap;
+
+use std::env;
+
+use bootstrap::{Flags, Config, Build};
+
+fn main() {
+    let args = env::args().skip(1).collect::<Vec<_>>();
+    let flags = Flags::parse(&args);
+    let mut config = Config::parse(&flags.build, flags.config.clone());
+
+    // compat with `./configure` while we're still using that
+    if std::fs::metadata("config.mk").is_ok() {
+        config.update_with_config_mk();
+    }
+
+    Build::new(flags, config).build();
+}
diff --git a/src/bootstrap/bin/rustc.rs b/src/bootstrap/bin/rustc.rs
new file mode 100644 (file)
index 0000000..c64cbb9
--- /dev/null
@@ -0,0 +1,165 @@
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Shim which is passed to Cargo as "rustc" when running the bootstrap.
+//!
+//! This shim will take care of some various tasks that our build process
+//! requires that Cargo can't quite do through normal configuration:
+//!
+//! 1. When compiling build scripts and build dependencies, we need a guaranteed
+//!    full standard library available. The only compiler which actually has
+//!    this is the snapshot, so we detect this situation and always compile with
+//!    the snapshot compiler.
+//! 2. We pass a bunch of `--cfg` and other flags based on what we're compiling
+//!    (and this slightly differs based on whether we're using a snapshot or
+//!    not), so we do that all here.
+//!
+//! This may one day be replaced by RUSTFLAGS, but the dynamic nature of
+//! switching compilers for the bootstrap and for build scripts will probably
+//! never get replaced.
+
+extern crate bootstrap;
+
+use std::env;
+use std::ffi::OsString;
+use std::path::PathBuf;
+use std::process::Command;
+
+fn main() {
+    let args = env::args_os().skip(1).collect::<Vec<_>>();
+    // Detect whether or not we're a build script depending on whether --target
+    // is passed (a bit janky...)
+    let target = args.windows(2).find(|w| &*w[0] == "--target")
+                                .and_then(|w| w[1].to_str());
+
+    // Build scripts always use the snapshot compiler which is guaranteed to be
+    // able to produce an executable, whereas intermediate compilers may not
+    // have the standard library built yet and may not be able to produce an
+    // executable. Otherwise we just use the standard compiler we're
+    // bootstrapping with.
+    let (rustc, libdir) = if target.is_none() {
+        ("RUSTC_SNAPSHOT", "RUSTC_SNAPSHOT_LIBDIR")
+    } else {
+        ("RUSTC_REAL", "RUSTC_LIBDIR")
+    };
+    let stage = env::var("RUSTC_STAGE").unwrap();
+
+    let rustc = env::var_os(rustc).unwrap();
+    let libdir = env::var_os(libdir).unwrap();
+    let mut dylib_path = bootstrap::util::dylib_path();
+    dylib_path.insert(0, PathBuf::from(libdir));
+
+    let mut cmd = Command::new(rustc);
+    cmd.args(&args)
+       .arg("--cfg").arg(format!("stage{}", stage))
+       .env(bootstrap::util::dylib_path_var(),
+            env::join_paths(&dylib_path).unwrap());
+
+    if let Some(target) = target {
+        // The stage0 compiler has a special sysroot distinct from what we
+        // actually downloaded, so we just always pass the `--sysroot` option.
+        cmd.arg("--sysroot").arg(env::var_os("RUSTC_SYSROOT").unwrap());
+
+        // When we build Rust dylibs they're all intended for intermediate
+        // usage, so make sure we pass the -Cprefer-dynamic flag instead of
+        // linking all deps statically into the dylib.
+        cmd.arg("-Cprefer-dynamic");
+
+        // Help the libc crate compile by assisting it in finding the MUSL
+        // native libraries.
+        if let Some(s) = env::var_os("MUSL_ROOT") {
+            let mut root = OsString::from("native=");
+            root.push(&s);
+            root.push("/lib");
+            cmd.arg("-L").arg(&root);
+        }
+
+        // Pass down extra flags, commonly used to configure `-Clinker` when
+        // cross compiling.
+        if let Ok(s) = env::var("RUSTC_FLAGS") {
+            cmd.args(&s.split(" ").filter(|s| !s.is_empty()).collect::<Vec<_>>());
+        }
+
+        // If we're compiling specifically the `panic_abort` crate then we pass
+        // the `-C panic=abort` option. Note that we do not do this for any
+        // other crate intentionally as this is the only crate for now that we
+        // ship with panic=abort.
+        //
+        // This... is a bit of a hack how we detect this. Ideally this
+        // information should be encoded in the crate I guess? Would likely
+        // require an RFC amendment to RFC 1513, however.
+        let is_panic_abort = args.windows(2).any(|a| {
+            &*a[0] == "--crate-name" && &*a[1] == "panic_abort"
+        });
+        // FIXME(stage0): remove this `stage != "0"` condition
+        if is_panic_abort && stage != "0" {
+            cmd.arg("-C").arg("panic=abort");
+        }
+
+        // Set various options from config.toml to configure how we're building
+        // code.
+        if env::var("RUSTC_DEBUGINFO") == Ok("true".to_string()) {
+            cmd.arg("-g");
+        }
+        let debug_assertions = match env::var("RUSTC_DEBUG_ASSERTIONS") {
+            Ok(s) => if s == "true" {"y"} else {"n"},
+            Err(..) => "n",
+        };
+        cmd.arg("-C").arg(format!("debug-assertions={}", debug_assertions));
+        if let Ok(s) = env::var("RUSTC_CODEGEN_UNITS") {
+            cmd.arg("-C").arg(format!("codegen-units={}", s));
+        }
+
+        // Dealing with rpath here is a little special, so let's go into some
+        // detail. First off, `-rpath` is a linker option on Unix platforms
+        // which adds to the runtime dynamic loader path when looking for
+        // dynamic libraries. We use this by default on Unix platforms to ensure
+        // that our nightlies behave the same on Windows, that is they work out
+        // of the box. This can be disabled, of course, but basically that's why
+        // we're gated on RUSTC_RPATH here.
+        //
+        // Ok, so the astute might be wondering "why isn't `-C rpath` used
+        // here?" and that is indeed a good question to task. This codegen
+        // option is the compiler's current interface to generating an rpath.
+        // Unfortunately it doesn't quite suffice for us. The flag currently
+        // takes no value as an argument, so the compiler calculates what it
+        // should pass to the linker as `-rpath`. This unfortunately is based on
+        // the **compile time** directory structure which when building with
+        // Cargo will be very different than the runtime directory structure.
+        //
+        // All that's a really long winded way of saying that if we use
+        // `-Crpath` then the executables generated have the wrong rpath of
+        // something like `$ORIGIN/deps` when in fact the way we distribute
+        // rustc requires the rpath to be `$ORIGIN/../lib`.
+        //
+        // So, all in all, to set up the correct rpath we pass the linker
+        // argument manually via `-C link-args=-Wl,-rpath,...`. Plus isn't it
+        // fun to pass a flag to a tool to pass a flag to pass a flag to a tool
+        // to change a flag in a binary?
+        if env::var("RUSTC_RPATH") == Ok("true".to_string()) {
+            let rpath = if target.contains("apple") {
+                Some("-Wl,-rpath,@loader_path/../lib")
+            } else if !target.contains("windows") {
+                Some("-Wl,-rpath,$ORIGIN/../lib")
+            } else {
+                None
+            };
+            if let Some(rpath) = rpath {
+                cmd.arg("-C").arg(format!("link-args={}", rpath));
+            }
+        }
+    }
+
+    // Actually run the compiler!
+    std::process::exit(match cmd.status() {
+        Ok(s) => s.code().unwrap_or(1),
+        Err(e) => panic!("\n\nfailed to run {:?}: {}\n\n", cmd, e),
+    })
+}
diff --git a/src/bootstrap/bin/rustdoc.rs b/src/bootstrap/bin/rustdoc.rs
new file mode 100644 (file)
index 0000000..79629bf
--- /dev/null
@@ -0,0 +1,40 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Shim which is passed to Cargo as "rustdoc" when running the bootstrap.
+//!
+//! See comments in `src/bootstrap/rustc.rs` for more information.
+
+extern crate bootstrap;
+
+use std::env;
+use std::process::Command;
+use std::path::PathBuf;
+
+fn main() {
+    let args = env::args_os().skip(1).collect::<Vec<_>>();
+    let rustdoc = env::var_os("RUSTDOC_REAL").unwrap();
+    let libdir = env::var_os("RUSTC_LIBDIR").unwrap();
+
+    let mut dylib_path = bootstrap::util::dylib_path();
+    dylib_path.insert(0, PathBuf::from(libdir));
+
+    let mut cmd = Command::new(rustdoc);
+    cmd.args(&args)
+       .arg("--cfg").arg(format!("stage{}", env::var("RUSTC_STAGE").unwrap()))
+       .arg("--cfg").arg("dox")
+       .env(bootstrap::util::dylib_path_var(),
+            env::join_paths(&dylib_path).unwrap());
+    std::process::exit(match cmd.status() {
+        Ok(s) => s.code().unwrap_or(1),
+        Err(e) => panic!("\n\nfailed to run {:?}: {}\n\n", cmd, e),
+    })
+}
+
index 33de8fd0107673ccbeb97c4699df506e1f2d143c..17a7c9ca66a2664488d4187d4af588b51a847a36 100644 (file)
@@ -31,8 +31,16 @@ def get(url, path, verbose=False):
 
     try:
         download(sha_path, sha_url, verbose)
+        if os.path.exists(path):
+            if verify(path, sha_path, False):
+                print("using already-download file " + path)
+                return
+            else:
+                print("ignoring already-download file " + path + " due to failed verification")
+                os.unlink(path)
         download(temp_path, url, verbose)
-        verify(temp_path, sha_path, verbose)
+        if not verify(temp_path, sha_path, True):
+            raise RuntimeError("failed verification")
         print("moving {} to {}".format(temp_path, path))
         shutil.move(temp_path, path)
     finally:
@@ -64,13 +72,12 @@ def verify(path, sha_path, verbose):
         found = hashlib.sha256(f.read()).hexdigest()
     with open(sha_path, "r") as f:
         expected, _ = f.readline().split()
-    if found != expected:
-        err = ("invalid checksum:\n"
+    verified = found == expected
+    if not verified and verbose:
+        print("invalid checksum:\n"
                "    found:    {}\n"
                "    expected: {}".format(found, expected))
-        if verbose:
-            raise RuntimeError(err)
-        sys.exit(err)
+    return verified
 
 
 def unpack(tarball, dst, verbose=False, match=None):
@@ -352,7 +359,7 @@ def main():
     parser.add_argument('--clean', action='store_true')
     parser.add_argument('-v', '--verbose', action='store_true')
 
-    args = [a for a in sys.argv if a != '-h']
+    args = [a for a in sys.argv if a != '-h' and a != '--help']
     args, _ = parser.parse_known_args(args)
 
     # Configure initial bootstrap
diff --git a/src/bootstrap/build/cc.rs b/src/bootstrap/build/cc.rs
deleted file mode 100644 (file)
index ff0941a..0000000
+++ /dev/null
@@ -1,124 +0,0 @@
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! C-compiler probing and detection.
-//!
-//! This module will fill out the `cc` and `cxx` maps of `Build` by looking for
-//! C and C++ compilers for each target configured. A compiler is found through
-//! a number of vectors (in order of precedence)
-//!
-//! 1. Configuration via `target.$target.cc` in `config.toml`.
-//! 2. Configuration via `target.$target.android-ndk` in `config.toml`, if
-//!    applicable
-//! 3. Special logic to probe on OpenBSD
-//! 4. The `CC_$target` environment variable.
-//! 5. The `CC` environment variable.
-//! 6. "cc"
-//!
-//! Some of this logic is implemented here, but much of it is farmed out to the
-//! `gcc` crate itself, so we end up having the same fallbacks as there.
-//! Similar logic is then used to find a C++ compiler, just some s/cc/c++/ is
-//! used.
-//!
-//! It is intended that after this module has run no C/C++ compiler will
-//! ever be probed for. Instead the compilers found here will be used for
-//! everything.
-
-use std::process::Command;
-
-use build_helper::{cc2ar, output};
-use gcc;
-
-use build::Build;
-use build::config::Target;
-
-pub fn find(build: &mut Build) {
-    // For all targets we're going to need a C compiler for building some shims
-    // and such as well as for being a linker for Rust code.
-    for target in build.config.target.iter() {
-        let mut cfg = gcc::Config::new();
-        cfg.cargo_metadata(false).opt_level(0).debug(false)
-           .target(target).host(&build.config.build);
-
-        let config = build.config.target_config.get(target);
-        if let Some(cc) = config.and_then(|c| c.cc.as_ref()) {
-            cfg.compiler(cc);
-        } else {
-            set_compiler(&mut cfg, "gcc", target, config);
-        }
-
-        let compiler = cfg.get_compiler();
-        let ar = cc2ar(compiler.path(), target);
-        build.verbose(&format!("CC_{} = {:?}", target, compiler.path()));
-        if let Some(ref ar) = ar {
-            build.verbose(&format!("AR_{} = {:?}", target, ar));
-        }
-        build.cc.insert(target.to_string(), (compiler, ar));
-    }
-
-    // For all host triples we need to find a C++ compiler as well
-    for host in build.config.host.iter() {
-        let mut cfg = gcc::Config::new();
-        cfg.cargo_metadata(false).opt_level(0).debug(false).cpp(true)
-           .target(host).host(&build.config.build);
-        let config = build.config.target_config.get(host);
-        if let Some(cxx) = config.and_then(|c| c.cxx.as_ref()) {
-            cfg.compiler(cxx);
-        } else {
-            set_compiler(&mut cfg, "g++", host, config);
-        }
-        let compiler = cfg.get_compiler();
-        build.verbose(&format!("CXX_{} = {:?}", host, compiler.path()));
-        build.cxx.insert(host.to_string(), compiler);
-    }
-}
-
-fn set_compiler(cfg: &mut gcc::Config,
-                gnu_compiler: &str,
-                target: &str,
-                config: Option<&Target>) {
-    match target {
-        // When compiling for android we may have the NDK configured in the
-        // config.toml in which case we look there. Otherwise the default
-        // compiler already takes into account the triple in question.
-        t if t.contains("android") => {
-            if let Some(ndk) = config.and_then(|c| c.ndk.as_ref()) {
-                let target = target.replace("armv7", "arm");
-                let compiler = format!("{}-{}", target, gnu_compiler);
-                cfg.compiler(ndk.join("bin").join(compiler));
-            }
-        }
-
-        // The default gcc version from OpenBSD may be too old, try using egcc,
-        // which is a gcc version from ports, if this is the case.
-        t if t.contains("openbsd") => {
-            let c = cfg.get_compiler();
-            if !c.path().ends_with(gnu_compiler) {
-                return
-            }
-
-            let output = output(c.to_command().arg("--version"));
-            let i = match output.find(" 4.") {
-                Some(i) => i,
-                None => return,
-            };
-            match output[i + 3..].chars().next().unwrap() {
-                '0' ... '6' => {}
-                _ => return,
-            }
-            let alternative = format!("e{}", gnu_compiler);
-            if Command::new(&alternative).output().is_ok() {
-                cfg.compiler(alternative);
-            }
-        }
-
-        _ => {}
-    }
-}
diff --git a/src/bootstrap/build/channel.rs b/src/bootstrap/build/channel.rs
deleted file mode 100644 (file)
index 76d061e..0000000
+++ /dev/null
@@ -1,110 +0,0 @@
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Build configuration for Rust's release channels.
-//!
-//! Implements the stable/beta/nightly channel distinctions by setting various
-//! flags like the `unstable_features`, calculating variables like `release` and
-//! `package_vers`, and otherwise indicating to the compiler what it should
-//! print out as part of its version information.
-
-use std::fs::{self, File};
-use std::io::prelude::*;
-use std::process::Command;
-
-use build_helper::output;
-use md5;
-
-use build::Build;
-
-pub fn collect(build: &mut Build) {
-    // Currently the canonical source for the release number (e.g. 1.10.0) and
-    // the prerelease version (e.g. `.1`) is in `mk/main.mk`. We "parse" that
-    // here to learn about those numbers.
-    let mut main_mk = String::new();
-    t!(t!(File::open(build.src.join("mk/main.mk"))).read_to_string(&mut main_mk));
-    let mut release_num = "";
-    let mut prerelease_version = "";
-    for line in main_mk.lines() {
-        if line.starts_with("CFG_RELEASE_NUM") {
-            release_num = line.split('=').skip(1).next().unwrap().trim();
-        }
-        if line.starts_with("CFG_PRERELEASE_VERSION") {
-            prerelease_version = line.split('=').skip(1).next().unwrap().trim();
-        }
-    }
-
-    // Depending on the channel, passed in `./configure --release-channel`,
-    // determine various properties of the build.
-    match &build.config.channel[..] {
-        "stable" => {
-            build.release = release_num.to_string();
-            build.package_vers = build.release.clone();
-            build.unstable_features = false;
-        }
-        "beta" => {
-            build.release = format!("{}-beta{}", release_num,
-                                   prerelease_version);
-            build.package_vers = "beta".to_string();
-            build.unstable_features = false;
-        }
-        "nightly" => {
-            build.release = format!("{}-nightly", release_num);
-            build.package_vers = "nightly".to_string();
-            build.unstable_features = true;
-        }
-        _ => {
-            build.release = format!("{}-dev", release_num);
-            build.package_vers = build.release.clone();
-            build.unstable_features = true;
-        }
-    }
-    build.version = build.release.clone();
-
-    // If we have a git directory, add in some various SHA information of what
-    // commit this compiler was compiled from.
-    if fs::metadata(build.src.join(".git")).is_ok() {
-        let ver_date = output(Command::new("git").current_dir(&build.src)
-                                      .arg("log").arg("-1")
-                                      .arg("--date=short")
-                                      .arg("--pretty=format:%cd"));
-        let ver_hash = output(Command::new("git").current_dir(&build.src)
-                                      .arg("rev-parse").arg("HEAD"));
-        let short_ver_hash = output(Command::new("git")
-                                            .current_dir(&build.src)
-                                            .arg("rev-parse")
-                                            .arg("--short=9")
-                                            .arg("HEAD"));
-        let ver_date = ver_date.trim().to_string();
-        let ver_hash = ver_hash.trim().to_string();
-        let short_ver_hash = short_ver_hash.trim().to_string();
-        build.version.push_str(&format!(" ({} {})", short_ver_hash,
-                                       ver_date));
-        build.ver_date = Some(ver_date.to_string());
-        build.ver_hash = Some(ver_hash);
-        build.short_ver_hash = Some(short_ver_hash);
-    }
-
-    // Calculate this compiler's bootstrap key, which is currently defined as
-    // the first 8 characters of the md5 of the release string.
-    let key = md5::compute(build.release.as_bytes());
-    build.bootstrap_key = format!("{:02x}{:02x}{:02x}{:02x}",
-                                  key[0], key[1], key[2], key[3]);
-
-    // Slurp up the stage0 bootstrap key as we're bootstrapping from an
-    // otherwise stable compiler.
-    let mut s = String::new();
-    t!(t!(File::open(build.src.join("src/stage0.txt"))).read_to_string(&mut s));
-    if let Some(line) = s.lines().find(|l| l.starts_with("rustc_key")) {
-        if let Some(key) = line.split(": ").nth(1) {
-            build.bootstrap_key_stage0 = key.to_string();
-        }
-    }
-}
diff --git a/src/bootstrap/build/check.rs b/src/bootstrap/build/check.rs
deleted file mode 100644 (file)
index 0a096f8..0000000
+++ /dev/null
@@ -1,414 +0,0 @@
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Implementation of the various `check-*` targets of the build system.
-//!
-//! This file implements the various regression test suites that we execute on
-//! our CI.
-
-use std::env;
-use std::fs::{self, File};
-use std::io::prelude::*;
-use std::path::{PathBuf, Path};
-use std::process::Command;
-
-use build_helper::output;
-use bootstrap::{dylib_path, dylib_path_var};
-
-use build::{Build, Compiler, Mode};
-use build::util;
-
-const ADB_TEST_DIR: &'static str = "/data/tmp";
-
-/// Runs the `linkchecker` tool as compiled in `stage` by the `host` compiler.
-///
-/// This tool in `src/tools` will verify the validity of all our links in the
-/// documentation to ensure we don't have a bunch of dead ones.
-pub fn linkcheck(build: &Build, stage: u32, host: &str) {
-    println!("Linkcheck stage{} ({})", stage, host);
-    let compiler = Compiler::new(stage, host);
-    build.run(build.tool_cmd(&compiler, "linkchecker")
-                   .arg(build.out.join(host).join("doc")));
-}
-
-/// Runs the `cargotest` tool as compiled in `stage` by the `host` compiler.
-///
-/// This tool in `src/tools` will check out a few Rust projects and run `cargo
-/// test` to ensure that we don't regress the test suites there.
-pub fn cargotest(build: &Build, stage: u32, host: &str) {
-    let ref compiler = Compiler::new(stage, host);
-
-    // Configure PATH to find the right rustc. NB. we have to use PATH
-    // and not RUSTC because the Cargo test suite has tests that will
-    // fail if rustc is not spelled `rustc`.
-    let path = build.sysroot(compiler).join("bin");
-    let old_path = ::std::env::var("PATH").expect("");
-    let sep = if cfg!(windows) { ";" } else {":" };
-    let ref newpath = format!("{}{}{}", path.display(), sep, old_path);
-
-    // Note that this is a short, cryptic, and not scoped directory name. This
-    // is currently to minimize the length of path on Windows where we otherwise
-    // quickly run into path name limit constraints.
-    let out_dir = build.out.join("ct");
-    t!(fs::create_dir_all(&out_dir));
-
-    build.run(build.tool_cmd(compiler, "cargotest")
-                   .env("PATH", newpath)
-                   .arg(&build.cargo)
-                   .arg(&out_dir));
-}
-
-/// Runs the `tidy` tool as compiled in `stage` by the `host` compiler.
-///
-/// This tool in `src/tools` checks up on various bits and pieces of style and
-/// otherwise just implements a few lint-like checks that are specific to the
-/// compiler itself.
-pub fn tidy(build: &Build, stage: u32, host: &str) {
-    println!("tidy check stage{} ({})", stage, host);
-    let compiler = Compiler::new(stage, host);
-    build.run(build.tool_cmd(&compiler, "tidy")
-                   .arg(build.src.join("src")));
-}
-
-fn testdir(build: &Build, host: &str) -> PathBuf {
-    build.out.join(host).join("test")
-}
-
-/// Executes the `compiletest` tool to run a suite of tests.
-///
-/// Compiles all tests with `compiler` for `target` with the specified
-/// compiletest `mode` and `suite` arguments. For example `mode` can be
-/// "run-pass" or `suite` can be something like `debuginfo`.
-pub fn compiletest(build: &Build,
-                   compiler: &Compiler,
-                   target: &str,
-                   mode: &str,
-                   suite: &str) {
-    println!("Check compiletest {} ({} -> {})", suite, compiler.host, target);
-    let mut cmd = build.tool_cmd(compiler, "compiletest");
-
-    // compiletest currently has... a lot of arguments, so let's just pass all
-    // of them!
-
-    cmd.arg("--compile-lib-path").arg(build.rustc_libdir(compiler));
-    cmd.arg("--run-lib-path").arg(build.sysroot_libdir(compiler, target));
-    cmd.arg("--rustc-path").arg(build.compiler_path(compiler));
-    cmd.arg("--rustdoc-path").arg(build.rustdoc(compiler));
-    cmd.arg("--src-base").arg(build.src.join("src/test").join(suite));
-    cmd.arg("--build-base").arg(testdir(build, compiler.host).join(suite));
-    cmd.arg("--stage-id").arg(format!("stage{}-{}", compiler.stage, target));
-    cmd.arg("--mode").arg(mode);
-    cmd.arg("--target").arg(target);
-    cmd.arg("--host").arg(compiler.host);
-    cmd.arg("--llvm-filecheck").arg(build.llvm_filecheck(&build.config.build));
-
-    let mut flags = vec!["-Crpath".to_string()];
-    if build.config.rust_optimize_tests {
-        flags.push("-O".to_string());
-    }
-    if build.config.rust_debuginfo_tests {
-        flags.push("-g".to_string());
-    }
-
-    let mut hostflags = build.rustc_flags(&compiler.host);
-    hostflags.extend(flags.clone());
-    cmd.arg("--host-rustcflags").arg(hostflags.join(" "));
-
-    let mut targetflags = build.rustc_flags(&target);
-    targetflags.extend(flags);
-    targetflags.push(format!("-Lnative={}",
-                             build.test_helpers_out(target).display()));
-    cmd.arg("--target-rustcflags").arg(targetflags.join(" "));
-
-    // FIXME: CFG_PYTHON should probably be detected more robustly elsewhere
-    let python_default = "python";
-    cmd.arg("--docck-python").arg(python_default);
-
-    if build.config.build.ends_with("apple-darwin") {
-        // Force /usr/bin/python on OSX for LLDB tests because we're loading the
-        // LLDB plugin's compiled module which only works with the system python
-        // (namely not Homebrew-installed python)
-        cmd.arg("--lldb-python").arg("/usr/bin/python");
-    } else {
-        cmd.arg("--lldb-python").arg(python_default);
-    }
-
-    if let Some(ref vers) = build.gdb_version {
-        cmd.arg("--gdb-version").arg(vers);
-    }
-    if let Some(ref vers) = build.lldb_version {
-        cmd.arg("--lldb-version").arg(vers);
-    }
-    if let Some(ref dir) = build.lldb_python_dir {
-        cmd.arg("--lldb-python-dir").arg(dir);
-    }
-
-    cmd.args(&build.flags.args);
-
-    if build.config.verbose || build.flags.verbose {
-        cmd.arg("--verbose");
-    }
-
-    // Only pass correct values for these flags for the `run-make` suite as it
-    // requires that a C++ compiler was configured which isn't always the case.
-    if suite == "run-make" {
-        let llvm_config = build.llvm_config(target);
-        let llvm_components = output(Command::new(&llvm_config).arg("--components"));
-        let llvm_cxxflags = output(Command::new(&llvm_config).arg("--cxxflags"));
-        cmd.arg("--cc").arg(build.cc(target))
-           .arg("--cxx").arg(build.cxx(target))
-           .arg("--cflags").arg(build.cflags(target).join(" "))
-           .arg("--llvm-components").arg(llvm_components.trim())
-           .arg("--llvm-cxxflags").arg(llvm_cxxflags.trim());
-    } else {
-        cmd.arg("--cc").arg("")
-           .arg("--cxx").arg("")
-           .arg("--cflags").arg("")
-           .arg("--llvm-components").arg("")
-           .arg("--llvm-cxxflags").arg("");
-    }
-
-    // Running a C compiler on MSVC requires a few env vars to be set, to be
-    // sure to set them here.
-    if target.contains("msvc") {
-        for &(ref k, ref v) in build.cc[target].0.env() {
-            if k != "PATH" {
-                cmd.env(k, v);
-            }
-        }
-    }
-    build.add_bootstrap_key(compiler, &mut cmd);
-
-    cmd.arg("--adb-path").arg("adb");
-    cmd.arg("--adb-test-dir").arg(ADB_TEST_DIR);
-    if target.contains("android") {
-        // Assume that cc for this target comes from the android sysroot
-        cmd.arg("--android-cross-path")
-           .arg(build.cc(target).parent().unwrap().parent().unwrap());
-    } else {
-        cmd.arg("--android-cross-path").arg("");
-    }
-
-    build.run(&mut cmd);
-}
-
-/// Run `rustdoc --test` for all documentation in `src/doc`.
-///
-/// This will run all tests in our markdown documentation (e.g. the book)
-/// located in `src/doc`. The `rustdoc` that's run is the one that sits next to
-/// `compiler`.
-pub fn docs(build: &Build, compiler: &Compiler) {
-    // Do a breadth-first traversal of the `src/doc` directory and just run
-    // tests for all files that end in `*.md`
-    let mut stack = vec![build.src.join("src/doc")];
-
-    while let Some(p) = stack.pop() {
-        if p.is_dir() {
-            stack.extend(t!(p.read_dir()).map(|p| t!(p).path()));
-            continue
-        }
-
-        if p.extension().and_then(|s| s.to_str()) != Some("md") {
-            continue
-        }
-
-        println!("doc tests for: {}", p.display());
-        markdown_test(build, compiler, &p);
-    }
-}
-
-/// Run the error index generator tool to execute the tests located in the error
-/// index.
-///
-/// The `error_index_generator` tool lives in `src/tools` and is used to
-/// generate a markdown file from the error indexes of the code base which is
-/// then passed to `rustdoc --test`.
-pub fn error_index(build: &Build, compiler: &Compiler) {
-    println!("Testing error-index stage{}", compiler.stage);
-
-    let output = testdir(build, compiler.host).join("error-index.md");
-    build.run(build.tool_cmd(compiler, "error_index_generator")
-                   .arg("markdown")
-                   .arg(&output)
-                   .env("CFG_BUILD", &build.config.build));
-
-    markdown_test(build, compiler, &output);
-}
-
-fn markdown_test(build: &Build, compiler: &Compiler, markdown: &Path) {
-    let mut cmd = Command::new(build.rustdoc(compiler));
-    build.add_rustc_lib_path(compiler, &mut cmd);
-    cmd.arg("--test");
-    cmd.arg(markdown);
-    cmd.arg("--test-args").arg(build.flags.args.join(" "));
-    build.run(&mut cmd);
-}
-
-/// Run all unit tests plus documentation tests for an entire crate DAG defined
-/// by a `Cargo.toml`
-///
-/// This is what runs tests for crates like the standard library, compiler, etc.
-/// It essentially is the driver for running `cargo test`.
-///
-/// Currently this runs all tests for a DAG by passing a bunch of `-p foo`
-/// arguments, and those arguments are discovered from `Cargo.lock`.
-pub fn krate(build: &Build,
-             compiler: &Compiler,
-             target: &str,
-             mode: Mode) {
-    let (name, path, features) = match mode {
-        Mode::Libstd => ("libstd", "src/rustc/std_shim", build.std_features()),
-        Mode::Libtest => ("libtest", "src/rustc/test_shim", String::new()),
-        Mode::Librustc => ("librustc", "src/rustc", build.rustc_features()),
-        _ => panic!("can only test libraries"),
-    };
-    println!("Testing {} stage{} ({} -> {})", name, compiler.stage,
-             compiler.host, target);
-
-    // Build up the base `cargo test` command.
-    let mut cargo = build.cargo(compiler, mode, target, "test");
-    cargo.arg("--manifest-path")
-         .arg(build.src.join(path).join("Cargo.toml"))
-         .arg("--features").arg(features);
-
-    // Generate a list of `-p` arguments to pass to the `cargo test` invocation
-    // by crawling the corresponding Cargo.lock file.
-    let lockfile = build.src.join(path).join("Cargo.lock");
-    let mut contents = String::new();
-    t!(t!(File::open(&lockfile)).read_to_string(&mut contents));
-    let mut lines = contents.lines();
-    while let Some(line) = lines.next() {
-        let prefix = "name = \"";
-        if !line.starts_with(prefix) {
-            continue
-        }
-        lines.next(); // skip `version = ...`
-
-        // skip crates.io or otherwise non-path crates
-        if let Some(line) = lines.next() {
-            if line.starts_with("source") {
-                continue
-            }
-        }
-
-        let crate_name = &line[prefix.len()..line.len() - 1];
-
-        // Right now jemalloc is our only target-specific crate in the sense
-        // that it's not present on all platforms. Custom skip it here for now,
-        // but if we add more this probably wants to get more generalized.
-        if crate_name.contains("jemalloc") {
-            continue
-        }
-
-        cargo.arg("-p").arg(crate_name);
-    }
-
-    // The tests are going to run with the *target* libraries, so we need to
-    // ensure that those libraries show up in the LD_LIBRARY_PATH equivalent.
-    //
-    // Note that to run the compiler we need to run with the *host* libraries,
-    // but our wrapper scripts arrange for that to be the case anyway.
-    let mut dylib_path = dylib_path();
-    dylib_path.insert(0, build.sysroot_libdir(compiler, target));
-    cargo.env(dylib_path_var(), env::join_paths(&dylib_path).unwrap());
-
-    if target.contains("android") {
-        build.run(cargo.arg("--no-run"));
-        krate_android(build, compiler, target, mode);
-    } else {
-        cargo.args(&build.flags.args);
-        build.run(&mut cargo);
-    }
-}
-
-fn krate_android(build: &Build,
-                 compiler: &Compiler,
-                 target: &str,
-                 mode: Mode) {
-    let mut tests = Vec::new();
-    let out_dir = build.cargo_out(compiler, mode, target);
-    find_tests(&out_dir, target, &mut tests);
-    find_tests(&out_dir.join("deps"), target, &mut tests);
-
-    for test in tests {
-        build.run(Command::new("adb").arg("push").arg(&test).arg(ADB_TEST_DIR));
-
-        let test_file_name = test.file_name().unwrap().to_string_lossy();
-        let log = format!("{}/check-stage{}-T-{}-H-{}-{}.log",
-                          ADB_TEST_DIR,
-                          compiler.stage,
-                          target,
-                          compiler.host,
-                          test_file_name);
-        let program = format!("(cd {dir}; \
-                                LD_LIBRARY_PATH=./{target} ./{test} \
-                                    --logfile {log} \
-                                    {args})",
-                              dir = ADB_TEST_DIR,
-                              target = target,
-                              test = test_file_name,
-                              log = log,
-                              args = build.flags.args.join(" "));
-
-        let output = output(Command::new("adb").arg("shell").arg(&program));
-        println!("{}", output);
-        build.run(Command::new("adb")
-                          .arg("pull")
-                          .arg(&log)
-                          .arg(build.out.join("tmp")));
-        build.run(Command::new("adb").arg("shell").arg("rm").arg(&log));
-        if !output.contains("result: ok") {
-            panic!("some tests failed");
-        }
-    }
-}
-
-fn find_tests(dir: &Path,
-              target: &str,
-              dst: &mut Vec<PathBuf>) {
-    for e in t!(dir.read_dir()).map(|e| t!(e)) {
-        let file_type = t!(e.file_type());
-        if !file_type.is_file() {
-            continue
-        }
-        let filename = e.file_name().into_string().unwrap();
-        if (target.contains("windows") && filename.ends_with(".exe")) ||
-           (!target.contains("windows") && !filename.contains(".")) {
-            dst.push(e.path());
-        }
-    }
-}
-
-pub fn android_copy_libs(build: &Build,
-                         compiler: &Compiler,
-                         target: &str) {
-    println!("Android copy libs to emulator ({})", target);
-    build.run(Command::new("adb").arg("remount"));
-    build.run(Command::new("adb").args(&["shell", "rm", "-r", ADB_TEST_DIR]));
-    build.run(Command::new("adb").args(&["shell", "mkdir", ADB_TEST_DIR]));
-    build.run(Command::new("adb")
-                      .arg("push")
-                      .arg(build.src.join("src/etc/adb_run_wrapper.sh"))
-                      .arg(ADB_TEST_DIR));
-
-    let target_dir = format!("{}/{}", ADB_TEST_DIR, target);
-    build.run(Command::new("adb").args(&["shell", "mkdir", &target_dir[..]]));
-
-    for f in t!(build.sysroot_libdir(compiler, target).read_dir()) {
-        let f = t!(f);
-        let name = f.file_name().into_string().unwrap();
-        if util::is_dylib(&name) {
-            build.run(Command::new("adb")
-                              .arg("push")
-                              .arg(f.path())
-                              .arg(&target_dir));
-        }
-    }
-}
diff --git a/src/bootstrap/build/clean.rs b/src/bootstrap/build/clean.rs
deleted file mode 100644 (file)
index 91334bd..0000000
+++ /dev/null
@@ -1,49 +0,0 @@
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Implementation of `make clean` in rustbuild.
-//!
-//! Responsible for cleaning out a build directory of all old and stale
-//! artifacts to prepare for a fresh build. Currently doesn't remove the
-//! `build/cache` directory (download cache) or the `build/$target/llvm`
-//! directory as we want that cached between builds.
-
-use std::fs;
-use std::path::Path;
-
-use build::Build;
-
-pub fn clean(build: &Build) {
-    rm_rf(build, "tmp".as_ref());
-    rm_rf(build, &build.out.join("tmp"));
-
-    for host in build.config.host.iter() {
-
-        let out = build.out.join(host);
-
-        rm_rf(build, &out.join("compiler-rt"));
-        rm_rf(build, &out.join("doc"));
-
-        for stage in 0..4 {
-            rm_rf(build, &out.join(format!("stage{}", stage)));
-            rm_rf(build, &out.join(format!("stage{}-std", stage)));
-            rm_rf(build, &out.join(format!("stage{}-rustc", stage)));
-            rm_rf(build, &out.join(format!("stage{}-tools", stage)));
-            rm_rf(build, &out.join(format!("stage{}-test", stage)));
-        }
-    }
-}
-
-fn rm_rf(build: &Build, path: &Path) {
-    if path.exists() {
-        build.verbose(&format!("removing `{}`", path.display()));
-        t!(fs::remove_dir_all(path));
-    }
-}
diff --git a/src/bootstrap/build/compile.rs b/src/bootstrap/build/compile.rs
deleted file mode 100644 (file)
index 5ed9c1c..0000000
+++ /dev/null
@@ -1,360 +0,0 @@
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Implementation of compiling various phases of the compiler and standard
-//! library.
-//!
-//! This module contains some of the real meat in the rustbuild build system
-//! which is where Cargo is used to compiler the standard library, libtest, and
-//! compiler. This module is also responsible for assembling the sysroot as it
-//! goes along from the output of the previous stage.
-
-use std::collections::HashMap;
-use std::fs;
-use std::path::{Path, PathBuf};
-use std::process::Command;
-
-use build_helper::output;
-
-use build::util::{exe, staticlib, libdir, mtime, is_dylib, copy};
-use build::{Build, Compiler, Mode};
-
-/// Build the standard library.
-///
-/// This will build the standard library for a particular stage of the build
-/// using the `compiler` targeting the `target` architecture. The artifacts
-/// created will also be linked into the sysroot directory.
-pub fn std<'a>(build: &'a Build, target: &str, compiler: &Compiler<'a>) {
-    println!("Building stage{} std artifacts ({} -> {})", compiler.stage,
-             compiler.host, target);
-
-    // Move compiler-rt into place as it'll be required by the compiler when
-    // building the standard library to link the dylib of libstd
-    let libdir = build.sysroot_libdir(compiler, target);
-    let _ = fs::remove_dir_all(&libdir);
-    t!(fs::create_dir_all(&libdir));
-    copy(&build.compiler_rt_built.borrow()[target],
-         &libdir.join(staticlib("compiler-rt", target)));
-
-    // Some platforms have startup objects that may be required to produce the
-    // libstd dynamic library, for example.
-    build_startup_objects(build, target, &libdir);
-
-    let out_dir = build.cargo_out(compiler, Mode::Libstd, target);
-    build.clear_if_dirty(&out_dir, &build.compiler_path(compiler));
-    let mut cargo = build.cargo(compiler, Mode::Libstd, target, "build");
-    cargo.arg("--features").arg(build.std_features())
-         .arg("--manifest-path")
-         .arg(build.src.join("src/rustc/std_shim/Cargo.toml"));
-
-    if let Some(target) = build.config.target_config.get(target) {
-        if let Some(ref jemalloc) = target.jemalloc {
-            cargo.env("JEMALLOC_OVERRIDE", jemalloc);
-        }
-    }
-    if let Some(ref p) = build.config.musl_root {
-        if target.contains("musl") {
-            cargo.env("MUSL_ROOT", p);
-        }
-    }
-
-    build.run(&mut cargo);
-    std_link(build, target, compiler, compiler.host);
-}
-
-/// Link all libstd rlibs/dylibs into the sysroot location.
-///
-/// Links those artifacts generated in the given `stage` for `target` produced
-/// by `compiler` into `host`'s sysroot.
-pub fn std_link(build: &Build,
-                target: &str,
-                compiler: &Compiler,
-                host: &str) {
-    let target_compiler = Compiler::new(compiler.stage, host);
-    let libdir = build.sysroot_libdir(&target_compiler, target);
-    let out_dir = build.cargo_out(compiler, Mode::Libstd, target);
-
-    // If we're linking one compiler host's output into another, then we weren't
-    // called from the `std` method above. In that case we clean out what's
-    // already there and then also link compiler-rt into place.
-    if host != compiler.host {
-        let _ = fs::remove_dir_all(&libdir);
-        t!(fs::create_dir_all(&libdir));
-        copy(&build.compiler_rt_built.borrow()[target],
-             &libdir.join(staticlib("compiler-rt", target)));
-    }
-    add_to_sysroot(&out_dir, &libdir);
-
-    if target.contains("musl") &&
-       (target.contains("x86_64") || target.contains("i686")) {
-        copy_third_party_objects(build, target, &libdir);
-    }
-}
-
-/// Copies the crt(1,i,n).o startup objects
-///
-/// Only required for musl targets that statically link to libc
-fn copy_third_party_objects(build: &Build, target: &str, into: &Path) {
-    for &obj in &["crt1.o", "crti.o", "crtn.o"] {
-        copy(&compiler_file(build.cc(target), obj), &into.join(obj));
-    }
-}
-
-/// Build and prepare startup objects like rsbegin.o and rsend.o
-///
-/// These are primarily used on Windows right now for linking executables/dlls.
-/// They don't require any library support as they're just plain old object
-/// files, so we just use the nightly snapshot compiler to always build them (as
-/// no other compilers are guaranteed to be available).
-fn build_startup_objects(build: &Build, target: &str, into: &Path) {
-    if !target.contains("pc-windows-gnu") {
-        return
-    }
-    let compiler = Compiler::new(0, &build.config.build);
-    let compiler = build.compiler_path(&compiler);
-
-    for file in t!(fs::read_dir(build.src.join("src/rtstartup"))) {
-        let file = t!(file);
-        build.run(Command::new(&compiler)
-                          .arg("--emit=obj")
-                          .arg("--out-dir").arg(into)
-                          .arg(file.path()));
-    }
-
-    for obj in ["crt2.o", "dllcrt2.o"].iter() {
-        copy(&compiler_file(build.cc(target), obj), &into.join(obj));
-    }
-}
-
-/// Build libtest.
-///
-/// This will build libtest and supporting libraries for a particular stage of
-/// the build using the `compiler` targeting the `target` architecture. The
-/// artifacts created will also be linked into the sysroot directory.
-pub fn test<'a>(build: &'a Build, target: &str, compiler: &Compiler<'a>) {
-    println!("Building stage{} test artifacts ({} -> {})", compiler.stage,
-             compiler.host, target);
-    let out_dir = build.cargo_out(compiler, Mode::Libtest, target);
-    build.clear_if_dirty(&out_dir, &libstd_shim(build, compiler, target));
-    let mut cargo = build.cargo(compiler, Mode::Libtest, target, "build");
-    cargo.arg("--manifest-path")
-         .arg(build.src.join("src/rustc/test_shim/Cargo.toml"));
-    build.run(&mut cargo);
-    test_link(build, target, compiler, compiler.host);
-}
-
-/// Link all libtest rlibs/dylibs into the sysroot location.
-///
-/// Links those artifacts generated in the given `stage` for `target` produced
-/// by `compiler` into `host`'s sysroot.
-pub fn test_link(build: &Build,
-                 target: &str,
-                 compiler: &Compiler,
-                 host: &str) {
-    let target_compiler = Compiler::new(compiler.stage, host);
-    let libdir = build.sysroot_libdir(&target_compiler, target);
-    let out_dir = build.cargo_out(compiler, Mode::Libtest, target);
-    add_to_sysroot(&out_dir, &libdir);
-}
-
-/// Build the compiler.
-///
-/// This will build the compiler for a particular stage of the build using
-/// the `compiler` targeting the `target` architecture. The artifacts
-/// created will also be linked into the sysroot directory.
-pub fn rustc<'a>(build: &'a Build, target: &str, compiler: &Compiler<'a>) {
-    println!("Building stage{} compiler artifacts ({} -> {})",
-             compiler.stage, compiler.host, target);
-
-    let out_dir = build.cargo_out(compiler, Mode::Librustc, target);
-    build.clear_if_dirty(&out_dir, &libtest_shim(build, compiler, target));
-
-    let mut cargo = build.cargo(compiler, Mode::Librustc, target, "build");
-    cargo.arg("--features").arg(build.rustc_features())
-         .arg("--manifest-path")
-         .arg(build.src.join("src/rustc/Cargo.toml"));
-
-    // Set some configuration variables picked up by build scripts and
-    // the compiler alike
-    cargo.env("CFG_RELEASE", &build.release)
-         .env("CFG_RELEASE_CHANNEL", &build.config.channel)
-         .env("CFG_VERSION", &build.version)
-         .env("CFG_BOOTSTRAP_KEY", &build.bootstrap_key)
-         .env("CFG_PREFIX", build.config.prefix.clone().unwrap_or(String::new()))
-         .env("CFG_LIBDIR_RELATIVE", "lib");
-
-    if let Some(ref ver_date) = build.ver_date {
-        cargo.env("CFG_VER_DATE", ver_date);
-    }
-    if let Some(ref ver_hash) = build.ver_hash {
-        cargo.env("CFG_VER_HASH", ver_hash);
-    }
-    if !build.unstable_features {
-        cargo.env("CFG_DISABLE_UNSTABLE_FEATURES", "1");
-    }
-    cargo.env("LLVM_CONFIG", build.llvm_config(target));
-    if build.config.llvm_static_stdcpp {
-        cargo.env("LLVM_STATIC_STDCPP",
-                  compiler_file(build.cxx(target), "libstdc++.a"));
-    }
-    if let Some(ref s) = build.config.rustc_default_linker {
-        cargo.env("CFG_DEFAULT_LINKER", s);
-    }
-    if let Some(ref s) = build.config.rustc_default_ar {
-        cargo.env("CFG_DEFAULT_AR", s);
-    }
-    build.run(&mut cargo);
-
-    rustc_link(build, target, compiler, compiler.host);
-}
-
-/// Link all librustc rlibs/dylibs into the sysroot location.
-///
-/// Links those artifacts generated in the given `stage` for `target` produced
-/// by `compiler` into `host`'s sysroot.
-pub fn rustc_link(build: &Build,
-                  target: &str,
-                  compiler: &Compiler,
-                  host: &str) {
-    let target_compiler = Compiler::new(compiler.stage, host);
-    let libdir = build.sysroot_libdir(&target_compiler, target);
-    let out_dir = build.cargo_out(compiler, Mode::Librustc, target);
-    add_to_sysroot(&out_dir, &libdir);
-}
-
-/// Cargo's output path for the standard library in a given stage, compiled
-/// by a particular compiler for the specified target.
-fn libstd_shim(build: &Build, compiler: &Compiler, target: &str) -> PathBuf {
-    build.cargo_out(compiler, Mode::Libstd, target).join("libstd_shim.rlib")
-}
-
-/// Cargo's output path for libtest in a given stage, compiled by a particular
-/// compiler for the specified target.
-fn libtest_shim(build: &Build, compiler: &Compiler, target: &str) -> PathBuf {
-    build.cargo_out(compiler, Mode::Libtest, target).join("libtest_shim.rlib")
-}
-
-fn compiler_file(compiler: &Path, file: &str) -> PathBuf {
-    let out = output(Command::new(compiler)
-                            .arg(format!("-print-file-name={}", file)));
-    PathBuf::from(out.trim())
-}
-
-/// Prepare a new compiler from the artifacts in `stage`
-///
-/// This will assemble a compiler in `build/$host/stage$stage`. The compiler
-/// must have been previously produced by the `stage - 1` build.config.build
-/// compiler.
-pub fn assemble_rustc(build: &Build, stage: u32, host: &str) {
-    assert!(stage > 0, "the stage0 compiler isn't assembled, it's downloaded");
-    // The compiler that we're assembling
-    let target_compiler = Compiler::new(stage, host);
-
-    // The compiler that compiled the compiler we're assembling
-    let build_compiler = Compiler::new(stage - 1, &build.config.build);
-
-    // Clear out old files
-    let sysroot = build.sysroot(&target_compiler);
-    let _ = fs::remove_dir_all(&sysroot);
-    t!(fs::create_dir_all(&sysroot));
-
-    // Link in all dylibs to the libdir
-    let sysroot_libdir = sysroot.join(libdir(host));
-    t!(fs::create_dir_all(&sysroot_libdir));
-    let src_libdir = build.sysroot_libdir(&build_compiler, host);
-    for f in t!(fs::read_dir(&src_libdir)).map(|f| t!(f)) {
-        let filename = f.file_name().into_string().unwrap();
-        if is_dylib(&filename) {
-            copy(&f.path(), &sysroot_libdir.join(&filename));
-        }
-    }
-
-    let out_dir = build.cargo_out(&build_compiler, Mode::Librustc, host);
-
-    // Link the compiler binary itself into place
-    let rustc = out_dir.join(exe("rustc", host));
-    let bindir = sysroot.join("bin");
-    t!(fs::create_dir_all(&bindir));
-    let compiler = build.compiler_path(&Compiler::new(stage, host));
-    let _ = fs::remove_file(&compiler);
-    copy(&rustc, &compiler);
-
-    // See if rustdoc exists to link it into place
-    let rustdoc = exe("rustdoc", host);
-    let rustdoc_src = out_dir.join(&rustdoc);
-    let rustdoc_dst = bindir.join(&rustdoc);
-    if fs::metadata(&rustdoc_src).is_ok() {
-        let _ = fs::remove_file(&rustdoc_dst);
-        copy(&rustdoc_src, &rustdoc_dst);
-    }
-}
-
-/// Link some files into a rustc sysroot.
-///
-/// For a particular stage this will link all of the contents of `out_dir`
-/// into the sysroot of the `host` compiler, assuming the artifacts are
-/// compiled for the specified `target`.
-fn add_to_sysroot(out_dir: &Path, sysroot_dst: &Path) {
-    // Collect the set of all files in the dependencies directory, keyed
-    // off the name of the library. We assume everything is of the form
-    // `foo-<hash>.{rlib,so,...}`, and there could be multiple different
-    // `<hash>` values for the same name (of old builds).
-    let mut map = HashMap::new();
-    for file in t!(fs::read_dir(out_dir.join("deps"))).map(|f| t!(f)) {
-        let filename = file.file_name().into_string().unwrap();
-
-        // We're only interested in linking rlibs + dylibs, other things like
-        // unit tests don't get linked in
-        if !filename.ends_with(".rlib") &&
-           !filename.ends_with(".lib") &&
-           !is_dylib(&filename) {
-            continue
-        }
-        let file = file.path();
-        let dash = filename.find("-").unwrap();
-        let key = (filename[..dash].to_string(),
-                   file.extension().unwrap().to_owned());
-        map.entry(key).or_insert(Vec::new())
-           .push(file.clone());
-    }
-
-    // For all hash values found, pick the most recent one to move into the
-    // sysroot, that should be the one we just built.
-    for (_, paths) in map {
-        let (_, path) = paths.iter().map(|path| {
-            (mtime(&path).seconds(), path)
-        }).max().unwrap();
-        copy(&path, &sysroot_dst.join(path.file_name().unwrap()));
-    }
-}
-
-/// Build a tool in `src/tools`
-///
-/// This will build the specified tool with the specified `host` compiler in
-/// `stage` into the normal cargo output directory.
-pub fn tool(build: &Build, stage: u32, host: &str, tool: &str) {
-    println!("Building stage{} tool {} ({})", stage, tool, host);
-
-    let compiler = Compiler::new(stage, host);
-
-    // FIXME: need to clear out previous tool and ideally deps, may require
-    //        isolating output directories or require a pseudo shim step to
-    //        clear out all the info.
-    //
-    //        Maybe when libstd is compiled it should clear out the rustc of the
-    //        corresponding stage?
-    // let out_dir = build.cargo_out(stage, &host, Mode::Librustc, target);
-    // build.clear_if_dirty(&out_dir, &libstd_shim(build, stage, &host, target));
-
-    let mut cargo = build.cargo(&compiler, Mode::Tool, host, "build");
-    cargo.arg("--manifest-path")
-         .arg(build.src.join(format!("src/tools/{}/Cargo.toml", tool)));
-    build.run(&mut cargo);
-}
diff --git a/src/bootstrap/build/config.rs b/src/bootstrap/build/config.rs
deleted file mode 100644 (file)
index 498196e..0000000
+++ /dev/null
@@ -1,396 +0,0 @@
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Serialized configuration of a build.
-//!
-//! This module implements parsing `config.mk` and `config.toml` configuration
-//! files to tweak how the build runs.
-
-use std::collections::HashMap;
-use std::env;
-use std::fs::File;
-use std::io::prelude::*;
-use std::path::PathBuf;
-use std::process;
-
-use num_cpus;
-use rustc_serialize::Decodable;
-use toml::{Parser, Decoder, Value};
-
-/// Global configuration for the entire build and/or bootstrap.
-///
-/// This structure is derived from a combination of both `config.toml` and
-/// `config.mk`. As of the time of this writing it's unlikely that `config.toml`
-/// is used all that much, so this is primarily filled out by `config.mk` which
-/// is generated from `./configure`.
-///
-/// Note that this structure is not decoded directly into, but rather it is
-/// filled out from the decoded forms of the structs below. For documentation
-/// each field, see the corresponding fields in
-/// `src/bootstrap/config.toml.example`.
-#[derive(Default)]
-pub struct Config {
-    pub ccache: bool,
-    pub ninja: bool,
-    pub verbose: bool,
-    pub submodules: bool,
-    pub compiler_docs: bool,
-    pub docs: bool,
-    pub target_config: HashMap<String, Target>,
-
-    // llvm codegen options
-    pub llvm_assertions: bool,
-    pub llvm_optimize: bool,
-    pub llvm_version_check: bool,
-    pub llvm_static_stdcpp: bool,
-
-    // rust codegen options
-    pub rust_optimize: bool,
-    pub rust_codegen_units: u32,
-    pub rust_debug_assertions: bool,
-    pub rust_debuginfo: bool,
-    pub rust_rpath: bool,
-    pub rustc_default_linker: Option<String>,
-    pub rustc_default_ar: Option<String>,
-    pub rust_optimize_tests: bool,
-    pub rust_debuginfo_tests: bool,
-
-    pub build: String,
-    pub host: Vec<String>,
-    pub target: Vec<String>,
-    pub rustc: Option<PathBuf>,
-    pub cargo: Option<PathBuf>,
-    pub local_rebuild: bool,
-
-    // libstd features
-    pub debug_jemalloc: bool,
-    pub use_jemalloc: bool,
-
-    // misc
-    pub channel: String,
-    pub musl_root: Option<PathBuf>,
-    pub prefix: Option<String>,
-}
-
-/// Per-target configuration stored in the global configuration structure.
-#[derive(Default)]
-pub struct Target {
-    pub llvm_config: Option<PathBuf>,
-    pub jemalloc: Option<PathBuf>,
-    pub cc: Option<PathBuf>,
-    pub cxx: Option<PathBuf>,
-    pub ndk: Option<PathBuf>,
-}
-
-/// Structure of the `config.toml` file that configuration is read from.
-///
-/// This structure uses `Decodable` to automatically decode a TOML configuration
-/// file into this format, and then this is traversed and written into the above
-/// `Config` structure.
-#[derive(RustcDecodable, Default)]
-struct TomlConfig {
-    build: Option<Build>,
-    llvm: Option<Llvm>,
-    rust: Option<Rust>,
-    target: Option<HashMap<String, TomlTarget>>,
-}
-
-/// TOML representation of various global build decisions.
-#[derive(RustcDecodable, Default, Clone)]
-struct Build {
-    build: Option<String>,
-    host: Vec<String>,
-    target: Vec<String>,
-    cargo: Option<String>,
-    rustc: Option<String>,
-    compiler_docs: Option<bool>,
-    docs: Option<bool>,
-}
-
-/// TOML representation of how the LLVM build is configured.
-#[derive(RustcDecodable, Default)]
-struct Llvm {
-    ccache: Option<bool>,
-    ninja: Option<bool>,
-    assertions: Option<bool>,
-    optimize: Option<bool>,
-    version_check: Option<bool>,
-    static_libstdcpp: Option<bool>,
-}
-
-/// TOML representation of how the Rust build is configured.
-#[derive(RustcDecodable, Default)]
-struct Rust {
-    optimize: Option<bool>,
-    codegen_units: Option<u32>,
-    debug_assertions: Option<bool>,
-    debuginfo: Option<bool>,
-    debug_jemalloc: Option<bool>,
-    use_jemalloc: Option<bool>,
-    default_linker: Option<String>,
-    default_ar: Option<String>,
-    channel: Option<String>,
-    musl_root: Option<String>,
-    rpath: Option<bool>,
-    optimize_tests: Option<bool>,
-    debuginfo_tests: Option<bool>,
-}
-
-/// TOML representation of how each build target is configured.
-#[derive(RustcDecodable, Default)]
-struct TomlTarget {
-    llvm_config: Option<String>,
-    jemalloc: Option<String>,
-    cc: Option<String>,
-    cxx: Option<String>,
-    android_ndk: Option<String>,
-}
-
-impl Config {
-    pub fn parse(build: &str, file: Option<PathBuf>) -> Config {
-        let mut config = Config::default();
-        config.llvm_optimize = true;
-        config.use_jemalloc = true;
-        config.rust_optimize = true;
-        config.rust_optimize_tests = true;
-        config.submodules = true;
-        config.docs = true;
-        config.rust_rpath = true;
-        config.rust_codegen_units = 1;
-        config.build = build.to_string();
-        config.channel = "dev".to_string();
-
-        let toml = file.map(|file| {
-            let mut f = t!(File::open(&file));
-            let mut toml = String::new();
-            t!(f.read_to_string(&mut toml));
-            let mut p = Parser::new(&toml);
-            let table = match p.parse() {
-                Some(table) => table,
-                None => {
-                    println!("failed to parse TOML configuration:");
-                    for err in p.errors.iter() {
-                        let (loline, locol) = p.to_linecol(err.lo);
-                        let (hiline, hicol) = p.to_linecol(err.hi);
-                        println!("{}:{}-{}:{}: {}", loline, locol, hiline,
-                                 hicol, err.desc);
-                    }
-                    process::exit(2);
-                }
-            };
-            let mut d = Decoder::new(Value::Table(table));
-            match Decodable::decode(&mut d) {
-                Ok(cfg) => cfg,
-                Err(e) => {
-                    println!("failed to decode TOML: {}", e);
-                    process::exit(2);
-                }
-            }
-        }).unwrap_or_else(|| TomlConfig::default());
-
-        let build = toml.build.clone().unwrap_or(Build::default());
-        set(&mut config.build, build.build.clone());
-        config.host.push(config.build.clone());
-        for host in build.host.iter() {
-            if !config.host.contains(host) {
-                config.host.push(host.clone());
-            }
-        }
-        for target in config.host.iter().chain(&build.target) {
-            if !config.target.contains(target) {
-                config.target.push(target.clone());
-            }
-        }
-        config.rustc = build.rustc.map(PathBuf::from);
-        config.cargo = build.cargo.map(PathBuf::from);
-        set(&mut config.compiler_docs, build.compiler_docs);
-        set(&mut config.docs, build.docs);
-
-        if let Some(ref llvm) = toml.llvm {
-            set(&mut config.ccache, llvm.ccache);
-            set(&mut config.ninja, llvm.ninja);
-            set(&mut config.llvm_assertions, llvm.assertions);
-            set(&mut config.llvm_optimize, llvm.optimize);
-            set(&mut config.llvm_version_check, llvm.version_check);
-            set(&mut config.llvm_static_stdcpp, llvm.static_libstdcpp);
-        }
-        if let Some(ref rust) = toml.rust {
-            set(&mut config.rust_debug_assertions, rust.debug_assertions);
-            set(&mut config.rust_debuginfo, rust.debuginfo);
-            set(&mut config.rust_optimize, rust.optimize);
-            set(&mut config.rust_optimize_tests, rust.optimize_tests);
-            set(&mut config.rust_debuginfo_tests, rust.debuginfo_tests);
-            set(&mut config.rust_rpath, rust.rpath);
-            set(&mut config.debug_jemalloc, rust.debug_jemalloc);
-            set(&mut config.use_jemalloc, rust.use_jemalloc);
-            set(&mut config.channel, rust.channel.clone());
-            config.rustc_default_linker = rust.default_linker.clone();
-            config.rustc_default_ar = rust.default_ar.clone();
-            config.musl_root = rust.musl_root.clone().map(PathBuf::from);
-
-            match rust.codegen_units {
-                Some(0) => config.rust_codegen_units = num_cpus::get() as u32,
-                Some(n) => config.rust_codegen_units = n,
-                None => {}
-            }
-        }
-
-        if let Some(ref t) = toml.target {
-            for (triple, cfg) in t {
-                let mut target = Target::default();
-
-                if let Some(ref s) = cfg.llvm_config {
-                    target.llvm_config = Some(env::current_dir().unwrap().join(s));
-                }
-                if let Some(ref s) = cfg.jemalloc {
-                    target.jemalloc = Some(env::current_dir().unwrap().join(s));
-                }
-                if let Some(ref s) = cfg.android_ndk {
-                    target.ndk = Some(env::current_dir().unwrap().join(s));
-                }
-                target.cxx = cfg.cxx.clone().map(PathBuf::from);
-                target.cc = cfg.cc.clone().map(PathBuf::from);
-
-                config.target_config.insert(triple.clone(), target);
-            }
-        }
-
-        return config
-    }
-
-    /// "Temporary" routine to parse `config.mk` into this configuration.
-    ///
-    /// While we still have `./configure` this implements the ability to decode
-    /// that configuration into this. This isn't exactly a full-blown makefile
-    /// parser, but hey it gets the job done!
-    pub fn update_with_config_mk(&mut self) {
-        let mut config = String::new();
-        File::open("config.mk").unwrap().read_to_string(&mut config).unwrap();
-        for line in config.lines() {
-            let mut parts = line.splitn(2, ":=").map(|s| s.trim());
-            let key = parts.next().unwrap();
-            let value = match parts.next() {
-                Some(n) if n.starts_with('\"') => &n[1..n.len() - 1],
-                Some(n) => n,
-                None => continue
-            };
-
-            macro_rules! check {
-                ($(($name:expr, $val:expr),)*) => {
-                    if value == "1" {
-                        $(
-                            if key == concat!("CFG_ENABLE_", $name) {
-                                $val = true;
-                                continue
-                            }
-                            if key == concat!("CFG_DISABLE_", $name) {
-                                $val = false;
-                                continue
-                            }
-                        )*
-                    }
-                }
-            }
-
-            check! {
-                ("CCACHE", self.ccache),
-                ("MANAGE_SUBMODULES", self.submodules),
-                ("COMPILER_DOCS", self.compiler_docs),
-                ("DOCS", self.docs),
-                ("LLVM_ASSERTIONS", self.llvm_assertions),
-                ("OPTIMIZE_LLVM", self.llvm_optimize),
-                ("LLVM_VERSION_CHECK", self.llvm_version_check),
-                ("LLVM_STATIC_STDCPP", self.llvm_static_stdcpp),
-                ("OPTIMIZE", self.rust_optimize),
-                ("DEBUG_ASSERTIONS", self.rust_debug_assertions),
-                ("DEBUGINFO", self.rust_debuginfo),
-                ("JEMALLOC", self.use_jemalloc),
-                ("DEBUG_JEMALLOC", self.debug_jemalloc),
-                ("RPATH", self.rust_rpath),
-                ("OPTIMIZE_TESTS", self.rust_optimize_tests),
-                ("DEBUGINFO_TESTS", self.rust_debuginfo_tests),
-                ("LOCAL_REBUILD", self.local_rebuild),
-            }
-
-            match key {
-                "CFG_BUILD" => self.build = value.to_string(),
-                "CFG_HOST" => {
-                    self.host = value.split(" ").map(|s| s.to_string())
-                                     .collect();
-                }
-                "CFG_TARGET" => {
-                    self.target = value.split(" ").map(|s| s.to_string())
-                                       .collect();
-                }
-                "CFG_MUSL_ROOT" if value.len() > 0 => {
-                    self.musl_root = Some(PathBuf::from(value));
-                }
-                "CFG_DEFAULT_AR" if value.len() > 0 => {
-                    self.rustc_default_ar = Some(value.to_string());
-                }
-                "CFG_DEFAULT_LINKER" if value.len() > 0 => {
-                    self.rustc_default_linker = Some(value.to_string());
-                }
-                "CFG_RELEASE_CHANNEL" => {
-                    self.channel = value.to_string();
-                }
-                "CFG_PREFIX" => {
-                    self.prefix = Some(value.to_string());
-                }
-                "CFG_LLVM_ROOT" if value.len() > 0 => {
-                    let target = self.target_config.entry(self.build.clone())
-                                     .or_insert(Target::default());
-                    let root = PathBuf::from(value);
-                    target.llvm_config = Some(root.join("bin/llvm-config"));
-                }
-                "CFG_JEMALLOC_ROOT" if value.len() > 0 => {
-                    let target = self.target_config.entry(self.build.clone())
-                                     .or_insert(Target::default());
-                    target.jemalloc = Some(PathBuf::from(value));
-                }
-                "CFG_ARM_LINUX_ANDROIDEABI_NDK" if value.len() > 0 => {
-                    let target = "arm-linux-androideabi".to_string();
-                    let target = self.target_config.entry(target)
-                                     .or_insert(Target::default());
-                    target.ndk = Some(PathBuf::from(value));
-                }
-                "CFG_ARMV7_LINUX_ANDROIDEABI_NDK" if value.len() > 0 => {
-                    let target = "armv7-linux-androideabi".to_string();
-                    let target = self.target_config.entry(target)
-                                     .or_insert(Target::default());
-                    target.ndk = Some(PathBuf::from(value));
-                }
-                "CFG_I686_LINUX_ANDROID_NDK" if value.len() > 0 => {
-                    let target = "i686-linux-android".to_string();
-                    let target = self.target_config.entry(target)
-                                     .or_insert(Target::default());
-                    target.ndk = Some(PathBuf::from(value));
-                }
-                "CFG_AARCH64_LINUX_ANDROID_NDK" if value.len() > 0 => {
-                    let target = "aarch64-linux-android".to_string();
-                    let target = self.target_config.entry(target)
-                                     .or_insert(Target::default());
-                    target.ndk = Some(PathBuf::from(value));
-                }
-                "CFG_LOCAL_RUST_ROOT" if value.len() > 0 => {
-                    self.rustc = Some(PathBuf::from(value).join("bin/rustc"));
-                    self.cargo = Some(PathBuf::from(value).join("bin/cargo"));
-                }
-                _ => {}
-            }
-        }
-    }
-}
-
-fn set<T>(field: &mut T, val: Option<T>) {
-    if let Some(v) = val {
-        *field = v;
-    }
-}
diff --git a/src/bootstrap/build/dist.rs b/src/bootstrap/build/dist.rs
deleted file mode 100644 (file)
index 6eed7ea..0000000
+++ /dev/null
@@ -1,319 +0,0 @@
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Implementation of the various distribution aspects of the compiler.
-//!
-//! This module is responsible for creating tarballs of the standard library,
-//! compiler, and documentation. This ends up being what we distribute to
-//! everyone as well.
-//!
-//! No tarball is actually created literally in this file, but rather we shell
-//! out to `rust-installer` still. This may one day be replaced with bits and
-//! pieces of `rustup.rs`!
-
-use std::fs::{self, File};
-use std::io::Write;
-use std::path::{PathBuf, Path};
-use std::process::Command;
-
-use build::{Build, Compiler};
-use build::util::{cp_r, libdir, is_dylib};
-
-fn package_vers(build: &Build) -> &str {
-    match &build.config.channel[..] {
-        "stable" => &build.release,
-        "beta" => "beta",
-        "nightly" => "nightly",
-        _ => &build.release,
-    }
-}
-
-fn distdir(build: &Build) -> PathBuf {
-    build.out.join("dist")
-}
-
-fn tmpdir(build: &Build) -> PathBuf {
-    build.out.join("tmp/dist")
-}
-
-/// Builds the `rust-docs` installer component.
-///
-/// Slurps up documentation from the `stage`'s `host`.
-pub fn docs(build: &Build, stage: u32, host: &str) {
-    println!("Dist docs stage{} ({})", stage, host);
-    let name = format!("rust-docs-{}", package_vers(build));
-    let image = tmpdir(build).join(format!("{}-{}-image", name, name));
-    let _ = fs::remove_dir_all(&image);
-
-    let dst = image.join("share/doc/rust/html");
-    t!(fs::create_dir_all(&dst));
-    let src = build.out.join(host).join("doc");
-    cp_r(&src, &dst);
-
-    let mut cmd = Command::new("sh");
-    cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
-       .arg("--product-name=Rust-Documentation")
-       .arg("--rel-manifest-dir=rustlib")
-       .arg("--success-message=Rust-documentation-is-installed.")
-       .arg(format!("--image-dir={}", sanitize_sh(&image)))
-       .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
-       .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
-       .arg(format!("--package-name={}-{}", name, host))
-       .arg("--component-name=rust-docs")
-       .arg("--legacy-manifest-dirs=rustlib,cargo")
-       .arg("--bulk-dirs=share/doc/rust/html");
-    build.run(&mut cmd);
-    t!(fs::remove_dir_all(&image));
-
-    // As part of this step, *also* copy the docs directory to a directory which
-    // buildbot typically uploads.
-    if host == build.config.build {
-        let dst = distdir(build).join("doc").join(&build.package_vers);
-        t!(fs::create_dir_all(&dst));
-        cp_r(&src, &dst);
-    }
-}
-
-/// Build the `rust-mingw` installer component.
-///
-/// This contains all the bits and pieces to run the MinGW Windows targets
-/// without any extra installed software (e.g. we bundle gcc, libraries, etc).
-/// Currently just shells out to a python script, but that should be rewritten
-/// in Rust.
-pub fn mingw(build: &Build, host: &str) {
-    println!("Dist mingw ({})", host);
-    let name = format!("rust-mingw-{}", package_vers(build));
-    let image = tmpdir(build).join(format!("{}-{}-image", name, host));
-    let _ = fs::remove_dir_all(&image);
-
-    // The first argument to the script is a "temporary directory" which is just
-    // thrown away (this contains the runtime DLLs included in the rustc package
-    // above) and the second argument is where to place all the MinGW components
-    // (which is what we want).
-    //
-    // FIXME: this script should be rewritten into Rust
-    let mut cmd = Command::new("python");
-    cmd.arg(build.src.join("src/etc/make-win-dist.py"))
-       .arg(tmpdir(build))
-       .arg(&image)
-       .arg(host);
-    build.run(&mut cmd);
-
-    let mut cmd = Command::new("sh");
-    cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
-       .arg("--product-name=Rust-MinGW")
-       .arg("--rel-manifest-dir=rustlib")
-       .arg("--success-message=Rust-MinGW-is-installed.")
-       .arg(format!("--image-dir={}", sanitize_sh(&image)))
-       .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
-       .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
-       .arg(format!("--package-name={}-{}", name, host))
-       .arg("--component-name=rust-mingw")
-       .arg("--legacy-manifest-dirs=rustlib,cargo");
-    build.run(&mut cmd);
-    t!(fs::remove_dir_all(&image));
-}
-
-/// Creates the `rustc` installer component.
-pub fn rustc(build: &Build, stage: u32, host: &str) {
-    println!("Dist rustc stage{} ({})", stage, host);
-    let name = format!("rustc-{}", package_vers(build));
-    let image = tmpdir(build).join(format!("{}-{}-image", name, host));
-    let _ = fs::remove_dir_all(&image);
-    let overlay = tmpdir(build).join(format!("{}-{}-overlay", name, host));
-    let _ = fs::remove_dir_all(&overlay);
-
-    // Prepare the rustc "image", what will actually end up getting installed
-    prepare_image(build, stage, host, &image);
-
-    // Prepare the overlay which is part of the tarball but won't actually be
-    // installed
-    let cp = |file: &str| {
-        install(&build.src.join(file), &overlay, 0o644);
-    };
-    cp("COPYRIGHT");
-    cp("LICENSE-APACHE");
-    cp("LICENSE-MIT");
-    cp("README.md");
-    // tiny morsel of metadata is used by rust-packaging
-    let version = &build.version;
-    t!(t!(File::create(overlay.join("version"))).write_all(version.as_bytes()));
-
-    // On MinGW we've got a few runtime DLL dependencies that we need to
-    // include. The first argument to this script is where to put these DLLs
-    // (the image we're creating), and the second argument is a junk directory
-    // to ignore all other MinGW stuff the script creates.
-    //
-    // On 32-bit MinGW we're always including a DLL which needs some extra
-    // licenses to distribute. On 64-bit MinGW we don't actually distribute
-    // anything requiring us to distribute a license, but it's likely the
-    // install will *also* include the rust-mingw package, which also needs
-    // licenses, so to be safe we just include it here in all MinGW packages.
-    //
-    // FIXME: this script should be rewritten into Rust
-    if host.contains("pc-windows-gnu") {
-        let mut cmd = Command::new("python");
-        cmd.arg(build.src.join("src/etc/make-win-dist.py"))
-           .arg(&image)
-           .arg(tmpdir(build))
-           .arg(host);
-        build.run(&mut cmd);
-
-        let dst = image.join("share/doc");
-        t!(fs::create_dir_all(&dst));
-        cp_r(&build.src.join("src/etc/third-party"), &dst);
-    }
-
-    // Finally, wrap everything up in a nice tarball!
-    let mut cmd = Command::new("sh");
-    cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
-       .arg("--product-name=Rust")
-       .arg("--rel-manifest-dir=rustlib")
-       .arg("--success-message=Rust-is-ready-to-roll.")
-       .arg(format!("--image-dir={}", sanitize_sh(&image)))
-       .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
-       .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
-       .arg(format!("--non-installed-overlay={}", sanitize_sh(&overlay)))
-       .arg(format!("--package-name={}-{}", name, host))
-       .arg("--component-name=rustc")
-       .arg("--legacy-manifest-dirs=rustlib,cargo");
-    build.run(&mut cmd);
-    t!(fs::remove_dir_all(&image));
-    t!(fs::remove_dir_all(&overlay));
-
-    fn prepare_image(build: &Build, stage: u32, host: &str, image: &Path) {
-        let src = build.sysroot(&Compiler::new(stage, host));
-        let libdir = libdir(host);
-
-        // Copy rustc/rustdoc binaries
-        t!(fs::create_dir_all(image.join("bin")));
-        cp_r(&src.join("bin"), &image.join("bin"));
-
-        // Copy runtime DLLs needed by the compiler
-        if libdir != "bin" {
-            for entry in t!(src.join(libdir).read_dir()).map(|e| t!(e)) {
-                let name = entry.file_name();
-                if let Some(s) = name.to_str() {
-                    if is_dylib(s) {
-                        install(&entry.path(), &image.join(libdir), 0o644);
-                    }
-                }
-            }
-        }
-
-        // Man pages
-        t!(fs::create_dir_all(image.join("share/man/man1")));
-        cp_r(&build.src.join("man"), &image.join("share/man/man1"));
-
-        // Debugger scripts
-        debugger_scripts(build, &image, host);
-
-        // Misc license info
-        let cp = |file: &str| {
-            install(&build.src.join(file), &image.join("share/doc/rust"), 0o644);
-        };
-        cp("COPYRIGHT");
-        cp("LICENSE-APACHE");
-        cp("LICENSE-MIT");
-        cp("README.md");
-    }
-}
-
-/// Copies debugger scripts for `host` into the `sysroot` specified.
-pub fn debugger_scripts(build: &Build,
-                        sysroot: &Path,
-                        host: &str) {
-    let cp_debugger_script = |file: &str| {
-        let dst = sysroot.join("lib/rustlib/etc");
-        t!(fs::create_dir_all(&dst));
-        install(&build.src.join("src/etc/").join(file), &dst, 0o644);
-    };
-    if host.contains("windows-msvc") {
-        // no debugger scripts
-    } else {
-        cp_debugger_script("debugger_pretty_printers_common.py");
-
-        // gdb debugger scripts
-        install(&build.src.join("src/etc/rust-gdb"), &sysroot.join("bin"),
-                0o755);
-
-        cp_debugger_script("gdb_load_rust_pretty_printers.py");
-        cp_debugger_script("gdb_rust_pretty_printing.py");
-
-        // lldb debugger scripts
-        install(&build.src.join("src/etc/rust-lldb"), &sysroot.join("bin"),
-                0o755);
-
-        cp_debugger_script("lldb_rust_formatters.py");
-    }
-}
-
-/// Creates the `rust-std` installer component as compiled by `compiler` for the
-/// target `target`.
-pub fn std(build: &Build, compiler: &Compiler, target: &str) {
-    println!("Dist std stage{} ({} -> {})", compiler.stage, compiler.host,
-             target);
-    let name = format!("rust-std-{}", package_vers(build));
-    let image = tmpdir(build).join(format!("{}-{}-image", name, target));
-    let _ = fs::remove_dir_all(&image);
-
-    let dst = image.join("lib/rustlib").join(target);
-    t!(fs::create_dir_all(&dst));
-    let src = build.sysroot(compiler).join("lib/rustlib");
-    cp_r(&src.join(target), &dst);
-
-    let mut cmd = Command::new("sh");
-    cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
-       .arg("--product-name=Rust")
-       .arg("--rel-manifest-dir=rustlib")
-       .arg("--success-message=std-is-standing-at-the-ready.")
-       .arg(format!("--image-dir={}", sanitize_sh(&image)))
-       .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
-       .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
-       .arg(format!("--package-name={}-{}", name, target))
-       .arg(format!("--component-name=rust-std-{}", target))
-       .arg("--legacy-manifest-dirs=rustlib,cargo");
-    build.run(&mut cmd);
-    t!(fs::remove_dir_all(&image));
-}
-
-fn install(src: &Path, dstdir: &Path, perms: u32) {
-    let dst = dstdir.join(src.file_name().unwrap());
-    t!(fs::create_dir_all(dstdir));
-    t!(fs::copy(src, &dst));
-    chmod(&dst, perms);
-}
-
-#[cfg(unix)]
-fn chmod(path: &Path, perms: u32) {
-    use std::os::unix::fs::*;
-    t!(fs::set_permissions(path, fs::Permissions::from_mode(perms)));
-}
-#[cfg(windows)]
-fn chmod(_path: &Path, _perms: u32) {}
-
-// We have to run a few shell scripts, which choke quite a bit on both `\`
-// characters and on `C:\` paths, so normalize both of them away.
-fn sanitize_sh(path: &Path) -> String {
-    let path = path.to_str().unwrap().replace("\\", "/");
-    return change_drive(&path).unwrap_or(path);
-
-    fn change_drive(s: &str) -> Option<String> {
-        let mut ch = s.chars();
-        let drive = ch.next().unwrap_or('C');
-        if ch.next() != Some(':') {
-            return None
-        }
-        if ch.next() != Some('/') {
-            return None
-        }
-        Some(format!("/{}/{}", drive, &s[drive.len_utf8() + 2..]))
-    }
-}
diff --git a/src/bootstrap/build/doc.rs b/src/bootstrap/build/doc.rs
deleted file mode 100644 (file)
index f7cc742..0000000
+++ /dev/null
@@ -1,207 +0,0 @@
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Documentation generation for rustbuild.
-//!
-//! This module implements generation for all bits and pieces of documentation
-//! for the Rust project. This notably includes suites like the rust book, the
-//! nomicon, standalone documentation, etc.
-//!
-//! Everything here is basically just a shim around calling either `rustbook` or
-//! `rustdoc`.
-
-use std::fs::{self, File};
-use std::io::prelude::*;
-use std::path::Path;
-use std::process::Command;
-
-use build::{Build, Compiler, Mode};
-use build::util::{up_to_date, cp_r};
-
-/// Invoke `rustbook` as compiled in `stage` for `target` for the doc book
-/// `name` into the `out` path.
-///
-/// This will not actually generate any documentation if the documentation has
-/// already been generated.
-pub fn rustbook(build: &Build, stage: u32, target: &str, name: &str, out: &Path) {
-    t!(fs::create_dir_all(out));
-
-    let out = out.join(name);
-    let compiler = Compiler::new(stage, &build.config.build);
-    let src = build.src.join("src/doc").join(name);
-    let index = out.join("index.html");
-    let rustbook = build.tool(&compiler, "rustbook");
-    if up_to_date(&src, &index) && up_to_date(&rustbook, &index) {
-        return
-    }
-    println!("Rustbook stage{} ({}) - {}", stage, target, name);
-    let _ = fs::remove_dir_all(&out);
-    build.run(build.tool_cmd(&compiler, "rustbook")
-                   .arg("build")
-                   .arg(&src)
-                   .arg(out));
-}
-
-/// Generates all standalone documentation as compiled by the rustdoc in `stage`
-/// for the `target` into `out`.
-///
-/// This will list all of `src/doc` looking for markdown files and appropriately
-/// perform transformations like substituting `VERSION`, `SHORT_HASH`, and
-/// `STAMP` alongw ith providing the various header/footer HTML we've cutomized.
-///
-/// In the end, this is just a glorified wrapper around rustdoc!
-pub fn standalone(build: &Build, stage: u32, target: &str, out: &Path) {
-    println!("Documenting stage{} standalone ({})", stage, target);
-    t!(fs::create_dir_all(out));
-
-    let compiler = Compiler::new(stage, &build.config.build);
-
-    let favicon = build.src.join("src/doc/favicon.inc");
-    let footer = build.src.join("src/doc/footer.inc");
-    let full_toc = build.src.join("src/doc/full-toc.inc");
-    t!(fs::copy(build.src.join("src/doc/rust.css"), out.join("rust.css")));
-
-    let version_input = build.src.join("src/doc/version_info.html.template");
-    let version_info = out.join("version_info.html");
-
-    if !up_to_date(&version_input, &version_info) {
-        let mut info = String::new();
-        t!(t!(File::open(&version_input)).read_to_string(&mut info));
-        let blank = String::new();
-        let short = build.short_ver_hash.as_ref().unwrap_or(&blank);
-        let hash = build.ver_hash.as_ref().unwrap_or(&blank);
-        let info = info.replace("VERSION", &build.release)
-                       .replace("SHORT_HASH", short)
-                       .replace("STAMP", hash);
-        t!(t!(File::create(&version_info)).write_all(info.as_bytes()));
-    }
-
-    for file in t!(fs::read_dir(build.src.join("src/doc"))) {
-        let file = t!(file);
-        let path = file.path();
-        let filename = path.file_name().unwrap().to_str().unwrap();
-        if !filename.ends_with(".md") || filename == "README.md" {
-            continue
-        }
-
-        let html = out.join(filename).with_extension("html");
-        let rustdoc = build.rustdoc(&compiler);
-        if up_to_date(&path, &html) &&
-           up_to_date(&footer, &html) &&
-           up_to_date(&favicon, &html) &&
-           up_to_date(&full_toc, &html) &&
-           up_to_date(&version_info, &html) &&
-           up_to_date(&rustdoc, &html) {
-            continue
-        }
-
-        let mut cmd = Command::new(&rustdoc);
-        build.add_rustc_lib_path(&compiler, &mut cmd);
-        cmd.arg("--html-after-content").arg(&footer)
-           .arg("--html-before-content").arg(&version_info)
-           .arg("--html-in-header").arg(&favicon)
-           .arg("--markdown-playground-url")
-           .arg("https://play.rust-lang.org/")
-           .arg("-o").arg(out)
-           .arg(&path);
-
-        if filename == "reference.md" {
-           cmd.arg("--html-in-header").arg(&full_toc);
-        }
-
-        if filename == "not_found.md" {
-            cmd.arg("--markdown-no-toc")
-               .arg("--markdown-css")
-               .arg("https://doc.rust-lang.org/rust.css");
-        } else {
-            cmd.arg("--markdown-css").arg("rust.css");
-        }
-        build.run(&mut cmd);
-    }
-}
-
-/// Compile all standard library documentation.
-///
-/// This will generate all documentation for the standard library and its
-/// dependencies. This is largely just a wrapper around `cargo doc`.
-pub fn std(build: &Build, stage: u32, target: &str, out: &Path) {
-    println!("Documenting stage{} std ({})", stage, target);
-    t!(fs::create_dir_all(out));
-    let compiler = Compiler::new(stage, &build.config.build);
-    let out_dir = build.stage_out(&compiler, Mode::Libstd)
-                       .join(target).join("doc");
-    let rustdoc = build.rustdoc(&compiler);
-
-    build.clear_if_dirty(&out_dir, &rustdoc);
-
-    let mut cargo = build.cargo(&compiler, Mode::Libstd, target, "doc");
-    cargo.arg("--manifest-path")
-         .arg(build.src.join("src/rustc/std_shim/Cargo.toml"))
-         .arg("--features").arg(build.std_features());
-    build.run(&mut cargo);
-    cp_r(&out_dir, out)
-}
-
-/// Compile all libtest documentation.
-///
-/// This will generate all documentation for libtest and its dependencies. This
-/// is largely just a wrapper around `cargo doc`.
-pub fn test(build: &Build, stage: u32, target: &str, out: &Path) {
-    println!("Documenting stage{} test ({})", stage, target);
-    let compiler = Compiler::new(stage, &build.config.build);
-    let out_dir = build.stage_out(&compiler, Mode::Libtest)
-                       .join(target).join("doc");
-    let rustdoc = build.rustdoc(&compiler);
-
-    build.clear_if_dirty(&out_dir, &rustdoc);
-
-    let mut cargo = build.cargo(&compiler, Mode::Libtest, target, "doc");
-    cargo.arg("--manifest-path")
-         .arg(build.src.join("src/rustc/test_shim/Cargo.toml"));
-    build.run(&mut cargo);
-    cp_r(&out_dir, out)
-}
-
-/// Generate all compiler documentation.
-///
-/// This will generate all documentation for the compiler libraries and their
-/// dependencies. This is largely just a wrapper around `cargo doc`.
-pub fn rustc(build: &Build, stage: u32, target: &str, out: &Path) {
-    println!("Documenting stage{} compiler ({})", stage, target);
-    let compiler = Compiler::new(stage, &build.config.build);
-    let out_dir = build.stage_out(&compiler, Mode::Librustc)
-                       .join(target).join("doc");
-    let rustdoc = build.rustdoc(&compiler);
-    if !up_to_date(&rustdoc, &out_dir.join("rustc/index.html")) {
-        t!(fs::remove_dir_all(&out_dir));
-    }
-    let mut cargo = build.cargo(&compiler, Mode::Librustc, target, "doc");
-    cargo.arg("--manifest-path")
-         .arg(build.src.join("src/rustc/Cargo.toml"))
-         .arg("--features").arg(build.rustc_features());
-    build.run(&mut cargo);
-    cp_r(&out_dir, out)
-}
-
-/// Generates the HTML rendered error-index by running the
-/// `error_index_generator` tool.
-pub fn error_index(build: &Build, stage: u32, target: &str, out: &Path) {
-    println!("Documenting stage{} error index ({})", stage, target);
-    t!(fs::create_dir_all(out));
-    let compiler = Compiler::new(stage, &build.config.build);
-    let mut index = build.tool_cmd(&compiler, "error_index_generator");
-    index.arg("html");
-    index.arg(out.join("error-index.html"));
-
-    // FIXME: shouldn't have to pass this env var
-    index.env("CFG_BUILD", &build.config.build);
-
-    build.run(&mut index);
-}
diff --git a/src/bootstrap/build/flags.rs b/src/bootstrap/build/flags.rs
deleted file mode 100644 (file)
index d925997..0000000
+++ /dev/null
@@ -1,103 +0,0 @@
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Command-line interface of the rustbuild build system.
-//!
-//! This module implements the command-line parsing of the build system which
-//! has various flags to configure how it's run.
-
-use std::fs;
-use std::path::PathBuf;
-use std::process;
-use std::slice;
-
-use getopts::Options;
-
-/// Deserialized version of all flags for this compile.
-pub struct Flags {
-    pub verbose: bool,
-    pub stage: Option<u32>,
-    pub build: String,
-    pub host: Filter,
-    pub target: Filter,
-    pub step: Vec<String>,
-    pub config: Option<PathBuf>,
-    pub src: Option<PathBuf>,
-    pub jobs: Option<u32>,
-    pub args: Vec<String>,
-    pub clean: bool,
-}
-
-pub struct Filter {
-    values: Vec<String>,
-}
-
-impl Flags {
-    pub fn parse(args: &[String]) -> Flags {
-        let mut opts = Options::new();
-        opts.optflag("v", "verbose", "use verbose output");
-        opts.optopt("", "config", "TOML configuration file for build", "FILE");
-        opts.optmulti("", "host", "host targets to build", "HOST");
-        opts.reqopt("", "build", "build target of the stage0 compiler", "BUILD");
-        opts.optmulti("", "target", "targets to build", "TARGET");
-        opts.optmulti("s", "step", "build step to execute", "STEP");
-        opts.optopt("", "stage", "stage to build", "N");
-        opts.optopt("", "src", "path to repo root", "DIR");
-        opts.optopt("j", "jobs", "number of jobs to run in parallel", "JOBS");
-        opts.optflag("", "clean", "clean output directory");
-        opts.optflag("h", "help", "print this help message");
-
-        let usage = |n| -> ! {
-            let brief = format!("Usage: rust.py [options]");
-            print!("{}", opts.usage(&brief));
-            process::exit(n);
-        };
-
-        let m = opts.parse(args).unwrap_or_else(|e| {
-            println!("failed to parse options: {}", e);
-            usage(1);
-        });
-        if m.opt_present("h") {
-            usage(0);
-        }
-
-        let cfg_file = m.opt_str("config").map(PathBuf::from).or_else(|| {
-            if fs::metadata("config.toml").is_ok() {
-                Some(PathBuf::from("config.toml"))
-            } else {
-                None
-            }
-        });
-
-        Flags {
-            verbose: m.opt_present("v"),
-            clean: m.opt_present("clean"),
-            stage: m.opt_str("stage").map(|j| j.parse().unwrap()),
-            build: m.opt_str("build").unwrap(),
-            host: Filter { values: m.opt_strs("host") },
-            target: Filter { values: m.opt_strs("target") },
-            step: m.opt_strs("step"),
-            config: cfg_file,
-            src: m.opt_str("src").map(PathBuf::from),
-            jobs: m.opt_str("jobs").map(|j| j.parse().unwrap()),
-            args: m.free.clone(),
-        }
-    }
-}
-
-impl Filter {
-    pub fn contains(&self, name: &str) -> bool {
-        self.values.len() == 0 || self.values.iter().any(|s| s == name)
-    }
-
-    pub fn iter(&self) -> slice::Iter<String> {
-        self.values.iter()
-    }
-}
diff --git a/src/bootstrap/build/job.rs b/src/bootstrap/build/job.rs
deleted file mode 100644 (file)
index 4558e6f..0000000
+++ /dev/null
@@ -1,111 +0,0 @@
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Job management on Windows for bootstrapping
-//!
-//! Most of the time when you're running a build system (e.g. make) you expect
-//! Ctrl-C or abnormal termination to actually terminate the entire tree of
-//! process in play, not just the one at the top. This currently works "by
-//! default" on Unix platforms because Ctrl-C actually sends a signal to the
-//! *process group* rather than the parent process, so everything will get torn
-//! down. On Windows, however, this does not happen and Ctrl-C just kills the
-//! parent process.
-//!
-//! To achieve the same semantics on Windows we use Job Objects to ensure that
-//! all processes die at the same time. Job objects have a mode of operation
-//! where when all handles to the object are closed it causes all child
-//! processes associated with the object to be terminated immediately.
-//! Conveniently whenever a process in the job object spawns a new process the
-//! child will be associated with the job object as well. This means if we add
-//! ourselves to the job object we create then everything will get torn down!
-//!
-//! Unfortunately most of the time the build system is actually called from a
-//! python wrapper (which manages things like building the build system) so this
-//! all doesn't quite cut it so far. To go the last mile we duplicate the job
-//! object handle into our parent process (a python process probably) and then
-//! close our own handle. This means that the only handle to the job object
-//! resides in the parent python process, so when python dies the whole build
-//! system dies (as one would probably expect!).
-//!
-//! Note that this module has a #[cfg(windows)] above it as none of this logic
-//! is required on Unix.
-
-extern crate kernel32;
-extern crate winapi;
-
-use std::env;
-use std::io;
-use std::mem;
-
-use self::winapi::*;
-use self::kernel32::*;
-
-pub unsafe fn setup() {
-    // Create a new job object for us to use
-    let job = CreateJobObjectW(0 as *mut _, 0 as *const _);
-    assert!(job != 0 as *mut _, "{}", io::Error::last_os_error());
-
-    // Indicate that when all handles to the job object are gone that all
-    // process in the object should be killed. Note that this includes our
-    // entire process tree by default because we've added ourselves and our
-    // children will reside in the job by default.
-    let mut info = mem::zeroed::<JOBOBJECT_EXTENDED_LIMIT_INFORMATION>();
-    info.BasicLimitInformation.LimitFlags = JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE;
-    let r = SetInformationJobObject(job,
-                                    JobObjectExtendedLimitInformation,
-                                    &mut info as *mut _ as LPVOID,
-                                    mem::size_of_val(&info) as DWORD);
-    assert!(r != 0, "{}", io::Error::last_os_error());
-
-    // Assign our process to this job object. Note that if this fails, one very
-    // likely reason is that we are ourselves already in a job object! This can
-    // happen on the build bots that we've got for Windows, or if just anyone
-    // else is instrumenting the build. In this case we just bail out
-    // immediately and assume that they take care of it.
-    //
-    // Also note that nested jobs (why this might fail) are supported in recent
-    // versions of Windows, but the version of Windows that our bots are running
-    // at least don't support nested job objects.
-    let r = AssignProcessToJobObject(job, GetCurrentProcess());
-    if r == 0 {
-        CloseHandle(job);
-        return
-    }
-
-    // If we've got a parent process (e.g. the python script that called us)
-    // then move ownership of this job object up to them. That way if the python
-    // script is killed (e.g. via ctrl-c) then we'll all be torn down.
-    //
-    // If we don't have a parent (e.g. this was run directly) then we
-    // intentionally leak the job object handle. When our process exits
-    // (normally or abnormally) it will close the handle implicitly, causing all
-    // processes in the job to be cleaned up.
-    let pid = match env::var("BOOTSTRAP_PARENT_ID") {
-        Ok(s) => s,
-        Err(..) => return,
-    };
-
-    let parent = OpenProcess(PROCESS_DUP_HANDLE, FALSE, pid.parse().unwrap());
-    assert!(parent != 0 as *mut _, "{}", io::Error::last_os_error());
-    let mut parent_handle = 0 as *mut _;
-    let r = DuplicateHandle(GetCurrentProcess(), job,
-                            parent, &mut parent_handle,
-                            0, FALSE, DUPLICATE_SAME_ACCESS);
-
-    // If this failed, well at least we tried! An example of DuplicateHandle
-    // failing in the past has been when the wrong python2 package spawed this
-    // build system (e.g. the `python2` package in MSYS instead of
-    // `mingw-w64-x86_64-python2`. Not sure why it failed, but the "failure
-    // mode" here is that we only clean everything up when the build system
-    // dies, not when the python parent does, so not too bad.
-    if r != 0 {
-        CloseHandle(job);
-    }
-}
diff --git a/src/bootstrap/build/mod.rs b/src/bootstrap/build/mod.rs
deleted file mode 100644 (file)
index 195d1bc..0000000
+++ /dev/null
@@ -1,871 +0,0 @@
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Implementation of rustbuild, the Rust build system.
-//!
-//! This module, and its descendants, are the implementation of the Rust build
-//! system. Most of this build system is backed by Cargo but the outer layer
-//! here serves as the ability to orchestrate calling Cargo, sequencing Cargo
-//! builds, building artifacts like LLVM, etc.
-//!
-//! More documentation can be found in each respective module below.
-
-use std::cell::RefCell;
-use std::collections::HashMap;
-use std::env;
-use std::fs::{self, File};
-use std::path::{PathBuf, Path};
-use std::process::Command;
-
-use build_helper::{run_silent, output};
-use gcc;
-use num_cpus;
-
-use build::util::{exe, mtime, libdir, add_lib_path};
-
-/// A helper macro to `unwrap` a result except also print out details like:
-///
-/// * The file/line of the panic
-/// * The expression that failed
-/// * The error itself
-///
-/// This is currently used judiciously throughout the build system rather than
-/// using a `Result` with `try!`, but this may change on day...
-macro_rules! t {
-    ($e:expr) => (match $e {
-        Ok(e) => e,
-        Err(e) => panic!("{} failed with {}", stringify!($e), e),
-    })
-}
-
-mod cc;
-mod channel;
-mod check;
-mod clean;
-mod compile;
-mod config;
-mod dist;
-mod doc;
-mod flags;
-mod native;
-mod sanity;
-mod step;
-mod util;
-
-#[cfg(windows)]
-mod job;
-
-#[cfg(not(windows))]
-mod job {
-    pub unsafe fn setup() {}
-}
-
-pub use build::config::Config;
-pub use build::flags::Flags;
-
-/// A structure representing a Rust compiler.
-///
-/// Each compiler has a `stage` that it is associated with and a `host` that
-/// corresponds to the platform the compiler runs on. This structure is used as
-/// a parameter to many methods below.
-#[derive(Eq, PartialEq, Clone, Copy, Hash, Debug)]
-pub struct Compiler<'a> {
-    stage: u32,
-    host: &'a str,
-}
-
-/// Global configuration for the build system.
-///
-/// This structure transitively contains all configuration for the build system.
-/// All filesystem-encoded configuration is in `config`, all flags are in
-/// `flags`, and then parsed or probed information is listed in the keys below.
-///
-/// This structure is a parameter of almost all methods in the build system,
-/// although most functions are implemented as free functions rather than
-/// methods specifically on this structure itself (to make it easier to
-/// organize).
-pub struct Build {
-    // User-specified configuration via config.toml
-    config: Config,
-
-    // User-specified configuration via CLI flags
-    flags: Flags,
-
-    // Derived properties from the above two configurations
-    cargo: PathBuf,
-    rustc: PathBuf,
-    src: PathBuf,
-    out: PathBuf,
-    release: String,
-    unstable_features: bool,
-    ver_hash: Option<String>,
-    short_ver_hash: Option<String>,
-    ver_date: Option<String>,
-    version: String,
-    package_vers: String,
-    bootstrap_key: String,
-    bootstrap_key_stage0: String,
-
-    // Probed tools at runtime
-    gdb_version: Option<String>,
-    lldb_version: Option<String>,
-    lldb_python_dir: Option<String>,
-
-    // Runtime state filled in later on
-    cc: HashMap<String, (gcc::Tool, Option<PathBuf>)>,
-    cxx: HashMap<String, gcc::Tool>,
-    compiler_rt_built: RefCell<HashMap<String, PathBuf>>,
-}
-
-/// The various "modes" of invoking Cargo.
-///
-/// These entries currently correspond to the various output directories of the
-/// build system, with each mod generating output in a different directory.
-#[derive(Clone, Copy)]
-pub enum Mode {
-    /// This cargo is going to build the standard library, placing output in the
-    /// "stageN-std" directory.
-    Libstd,
-
-    /// This cargo is going to build libtest, placing output in the
-    /// "stageN-test" directory.
-    Libtest,
-
-    /// This cargo is going to build librustc and compiler libraries, placing
-    /// output in the "stageN-rustc" directory.
-    Librustc,
-
-    /// This cargo is going to some build tool, placing output in the
-    /// "stageN-tools" directory.
-    Tool,
-}
-
-impl Build {
-    /// Creates a new set of build configuration from the `flags` on the command
-    /// line and the filesystem `config`.
-    ///
-    /// By default all build output will be placed in the current directory.
-    pub fn new(flags: Flags, config: Config) -> Build {
-        let cwd = t!(env::current_dir());
-        let src = flags.src.clone().unwrap_or(cwd.clone());
-        let out = cwd.join("build");
-
-        let stage0_root = out.join(&config.build).join("stage0/bin");
-        let rustc = match config.rustc {
-            Some(ref s) => PathBuf::from(s),
-            None => stage0_root.join(exe("rustc", &config.build)),
-        };
-        let cargo = match config.cargo {
-            Some(ref s) => PathBuf::from(s),
-            None => stage0_root.join(exe("cargo", &config.build)),
-        };
-
-        Build {
-            flags: flags,
-            config: config,
-            cargo: cargo,
-            rustc: rustc,
-            src: src,
-            out: out,
-
-            release: String::new(),
-            unstable_features: false,
-            ver_hash: None,
-            short_ver_hash: None,
-            ver_date: None,
-            version: String::new(),
-            bootstrap_key: String::new(),
-            bootstrap_key_stage0: String::new(),
-            package_vers: String::new(),
-            cc: HashMap::new(),
-            cxx: HashMap::new(),
-            compiler_rt_built: RefCell::new(HashMap::new()),
-            gdb_version: None,
-            lldb_version: None,
-            lldb_python_dir: None,
-        }
-    }
-
-    /// Executes the entire build, as configured by the flags and configuration.
-    pub fn build(&mut self) {
-        use build::step::Source::*;
-
-        unsafe {
-            job::setup();
-        }
-
-        if self.flags.clean {
-            return clean::clean(self);
-        }
-
-        self.verbose("finding compilers");
-        cc::find(self);
-        self.verbose("running sanity check");
-        sanity::check(self);
-        self.verbose("collecting channel variables");
-        channel::collect(self);
-        self.verbose("updating submodules");
-        self.update_submodules();
-
-        // The main loop of the build system.
-        //
-        // The `step::all` function returns a topographically sorted list of all
-        // steps that need to be executed as part of this build. Each step has a
-        // corresponding entry in `step.rs` and indicates some unit of work that
-        // needs to be done as part of the build.
-        //
-        // Almost all of these are simple one-liners that shell out to the
-        // corresponding functionality in the extra modules, where more
-        // documentation can be found.
-        for target in step::all(self) {
-            let doc_out = self.out.join(&target.target).join("doc");
-            match target.src {
-                Llvm { _dummy } => {
-                    native::llvm(self, target.target);
-                }
-                CompilerRt { _dummy } => {
-                    native::compiler_rt(self, target.target);
-                }
-                TestHelpers { _dummy } => {
-                    native::test_helpers(self, target.target);
-                }
-                Libstd { compiler } => {
-                    compile::std(self, target.target, &compiler);
-                }
-                Libtest { compiler } => {
-                    compile::test(self, target.target, &compiler);
-                }
-                Librustc { compiler } => {
-                    compile::rustc(self, target.target, &compiler);
-                }
-                LibstdLink { compiler, host } => {
-                    compile::std_link(self, target.target, &compiler, host);
-                }
-                LibtestLink { compiler, host } => {
-                    compile::test_link(self, target.target, &compiler, host);
-                }
-                LibrustcLink { compiler, host } => {
-                    compile::rustc_link(self, target.target, &compiler, host);
-                }
-                Rustc { stage: 0 } => {
-                    // nothing to do...
-                }
-                Rustc { stage } => {
-                    compile::assemble_rustc(self, stage, target.target);
-                }
-                ToolLinkchecker { stage } => {
-                    compile::tool(self, stage, target.target, "linkchecker");
-                }
-                ToolRustbook { stage } => {
-                    compile::tool(self, stage, target.target, "rustbook");
-                }
-                ToolErrorIndex { stage } => {
-                    compile::tool(self, stage, target.target,
-                                  "error_index_generator");
-                }
-                ToolCargoTest { stage } => {
-                    compile::tool(self, stage, target.target, "cargotest");
-                }
-                ToolTidy { stage } => {
-                    compile::tool(self, stage, target.target, "tidy");
-                }
-                ToolCompiletest { stage } => {
-                    compile::tool(self, stage, target.target, "compiletest");
-                }
-                DocBook { stage } => {
-                    doc::rustbook(self, stage, target.target, "book", &doc_out);
-                }
-                DocNomicon { stage } => {
-                    doc::rustbook(self, stage, target.target, "nomicon",
-                                  &doc_out);
-                }
-                DocStyle { stage } => {
-                    doc::rustbook(self, stage, target.target, "style",
-                                  &doc_out);
-                }
-                DocStandalone { stage } => {
-                    doc::standalone(self, stage, target.target, &doc_out);
-                }
-                DocStd { stage } => {
-                    doc::std(self, stage, target.target, &doc_out);
-                }
-                DocTest { stage } => {
-                    doc::test(self, stage, target.target, &doc_out);
-                }
-                DocRustc { stage } => {
-                    doc::rustc(self, stage, target.target, &doc_out);
-                }
-                DocErrorIndex { stage } => {
-                    doc::error_index(self, stage, target.target, &doc_out);
-                }
-
-                CheckLinkcheck { stage } => {
-                    check::linkcheck(self, stage, target.target);
-                }
-                CheckCargoTest { stage } => {
-                    check::cargotest(self, stage, target.target);
-                }
-                CheckTidy { stage } => {
-                    check::tidy(self, stage, target.target);
-                }
-                CheckRPass { compiler } => {
-                    check::compiletest(self, &compiler, target.target,
-                                       "run-pass", "run-pass");
-                }
-                CheckRPassFull { compiler } => {
-                    check::compiletest(self, &compiler, target.target,
-                                       "run-pass", "run-pass-fulldeps");
-                }
-                CheckCFail { compiler } => {
-                    check::compiletest(self, &compiler, target.target,
-                                       "compile-fail", "compile-fail");
-                }
-                CheckCFailFull { compiler } => {
-                    check::compiletest(self, &compiler, target.target,
-                                       "compile-fail", "compile-fail-fulldeps")
-                }
-                CheckPFail { compiler } => {
-                    check::compiletest(self, &compiler, target.target,
-                                       "parse-fail", "parse-fail");
-                }
-                CheckRFail { compiler } => {
-                    check::compiletest(self, &compiler, target.target,
-                                       "run-fail", "run-fail");
-                }
-                CheckRFailFull { compiler } => {
-                    check::compiletest(self, &compiler, target.target,
-                                       "run-fail", "run-fail-fulldeps");
-                }
-                CheckPretty { compiler } => {
-                    check::compiletest(self, &compiler, target.target,
-                                       "pretty", "pretty");
-                }
-                CheckPrettyRPass { compiler } => {
-                    check::compiletest(self, &compiler, target.target,
-                                       "pretty", "run-pass");
-                }
-                CheckPrettyRPassFull { compiler } => {
-                    check::compiletest(self, &compiler, target.target,
-                                       "pretty", "run-pass-fulldeps");
-                }
-                CheckPrettyRFail { compiler } => {
-                    check::compiletest(self, &compiler, target.target,
-                                       "pretty", "run-fail");
-                }
-                CheckPrettyRFailFull { compiler } => {
-                    check::compiletest(self, &compiler, target.target,
-                                       "pretty", "run-fail-fulldeps");
-                }
-                CheckPrettyRPassValgrind { compiler } => {
-                    check::compiletest(self, &compiler, target.target,
-                                       "pretty", "run-pass-valgrind");
-                }
-                CheckCodegen { compiler } => {
-                    check::compiletest(self, &compiler, target.target,
-                                       "codegen", "codegen");
-                }
-                CheckCodegenUnits { compiler } => {
-                    check::compiletest(self, &compiler, target.target,
-                                       "codegen-units", "codegen-units");
-                }
-                CheckIncremental { compiler } => {
-                    check::compiletest(self, &compiler, target.target,
-                                       "incremental", "incremental");
-                }
-                CheckUi { compiler } => {
-                    check::compiletest(self, &compiler, target.target,
-                                       "ui", "ui");
-                }
-                CheckDebuginfo { compiler } => {
-                    if target.target.contains("msvc") {
-                        // nothing to do
-                    } else if target.target.contains("apple") {
-                        check::compiletest(self, &compiler, target.target,
-                                           "debuginfo-lldb", "debuginfo");
-                    } else {
-                        check::compiletest(self, &compiler, target.target,
-                                           "debuginfo-gdb", "debuginfo");
-                    }
-                }
-                CheckRustdoc { compiler } => {
-                    check::compiletest(self, &compiler, target.target,
-                                       "rustdoc", "rustdoc");
-                }
-                CheckRPassValgrind { compiler } => {
-                    check::compiletest(self, &compiler, target.target,
-                                       "run-pass-valgrind", "run-pass-valgrind");
-                }
-                CheckDocs { compiler } => {
-                    check::docs(self, &compiler);
-                }
-                CheckErrorIndex { compiler } => {
-                    check::error_index(self, &compiler);
-                }
-                CheckRMake { compiler } => {
-                    check::compiletest(self, &compiler, target.target,
-                                       "run-make", "run-make")
-                }
-                CheckCrateStd { compiler } => {
-                    check::krate(self, &compiler, target.target, Mode::Libstd)
-                }
-                CheckCrateTest { compiler } => {
-                    check::krate(self, &compiler, target.target, Mode::Libtest)
-                }
-                CheckCrateRustc { compiler } => {
-                    check::krate(self, &compiler, target.target, Mode::Librustc)
-                }
-
-                DistDocs { stage } => dist::docs(self, stage, target.target),
-                DistMingw { _dummy } => dist::mingw(self, target.target),
-                DistRustc { stage } => dist::rustc(self, stage, target.target),
-                DistStd { compiler } => dist::std(self, &compiler, target.target),
-
-                DebuggerScripts { stage } => {
-                    let compiler = Compiler::new(stage, target.target);
-                    dist::debugger_scripts(self,
-                                           &self.sysroot(&compiler),
-                                           target.target);
-                }
-
-                AndroidCopyLibs { compiler } => {
-                    check::android_copy_libs(self, &compiler, target.target);
-                }
-
-                // pseudo-steps
-                Dist { .. } |
-                Doc { .. } |
-                CheckTarget { .. } |
-                Check { .. } => {}
-            }
-        }
-    }
-
-    /// Updates all git submodules that we have.
-    ///
-    /// This will detect if any submodules are out of date an run the necessary
-    /// commands to sync them all with upstream.
-    fn update_submodules(&self) {
-        if !self.config.submodules {
-            return
-        }
-        if fs::metadata(self.src.join(".git")).is_err() {
-            return
-        }
-        let git_submodule = || {
-            let mut cmd = Command::new("git");
-            cmd.current_dir(&self.src).arg("submodule");
-            return cmd
-        };
-
-        // FIXME: this takes a seriously long time to execute on Windows and a
-        //        nontrivial amount of time on Unix, we should have a better way
-        //        of detecting whether we need to run all the submodule commands
-        //        below.
-        let out = output(git_submodule().arg("status"));
-        if !out.lines().any(|l| l.starts_with("+") || l.starts_with("-")) {
-            return
-        }
-
-        self.run(git_submodule().arg("sync"));
-        self.run(git_submodule().arg("init"));
-        self.run(git_submodule().arg("update"));
-        self.run(git_submodule().arg("update").arg("--recursive"));
-        self.run(git_submodule().arg("status").arg("--recursive"));
-        self.run(git_submodule().arg("foreach").arg("--recursive")
-                                .arg("git").arg("clean").arg("-fdx"));
-        self.run(git_submodule().arg("foreach").arg("--recursive")
-                                .arg("git").arg("checkout").arg("."));
-    }
-
-    /// Clear out `dir` if `input` is newer.
-    ///
-    /// After this executes, it will also ensure that `dir` exists.
-    fn clear_if_dirty(&self, dir: &Path, input: &Path) {
-        let stamp = dir.join(".stamp");
-        if mtime(&stamp) < mtime(input) {
-            self.verbose(&format!("Dirty - {}", dir.display()));
-            let _ = fs::remove_dir_all(dir);
-        }
-        t!(fs::create_dir_all(dir));
-        t!(File::create(stamp));
-    }
-
-    /// Prepares an invocation of `cargo` to be run.
-    ///
-    /// This will create a `Command` that represents a pending execution of
-    /// Cargo. This cargo will be configured to use `compiler` as the actual
-    /// rustc compiler, its output will be scoped by `mode`'s output directory,
-    /// it will pass the `--target` flag for the specified `target`, and will be
-    /// executing the Cargo command `cmd`.
-    fn cargo(&self,
-             compiler: &Compiler,
-             mode: Mode,
-             target: &str,
-             cmd: &str) -> Command {
-        let mut cargo = Command::new(&self.cargo);
-        let out_dir = self.stage_out(compiler, mode);
-        cargo.env("CARGO_TARGET_DIR", out_dir)
-             .arg(cmd)
-             .arg("-j").arg(self.jobs().to_string())
-             .arg("--target").arg(target);
-
-        let stage;
-        if compiler.stage == 0 && self.config.local_rebuild {
-            // Assume the local-rebuild rustc already has stage1 features.
-            stage = 1;
-        } else {
-            stage = compiler.stage;
-        }
-
-        // Customize the compiler we're running. Specify the compiler to cargo
-        // as our shim and then pass it some various options used to configure
-        // how the actual compiler itself is called.
-        //
-        // These variables are primarily all read by
-        // src/bootstrap/{rustc,rustdoc.rs}
-        cargo.env("RUSTC", self.out.join("bootstrap/debug/rustc"))
-             .env("RUSTC_REAL", self.compiler_path(compiler))
-             .env("RUSTC_STAGE", stage.to_string())
-             .env("RUSTC_DEBUGINFO", self.config.rust_debuginfo.to_string())
-             .env("RUSTC_CODEGEN_UNITS",
-                  self.config.rust_codegen_units.to_string())
-             .env("RUSTC_DEBUG_ASSERTIONS",
-                  self.config.rust_debug_assertions.to_string())
-             .env("RUSTC_SNAPSHOT", &self.rustc)
-             .env("RUSTC_SYSROOT", self.sysroot(compiler))
-             .env("RUSTC_LIBDIR", self.rustc_libdir(compiler))
-             .env("RUSTC_SNAPSHOT_LIBDIR", self.rustc_snapshot_libdir())
-             .env("RUSTC_RPATH", self.config.rust_rpath.to_string())
-             .env("RUSTDOC", self.out.join("bootstrap/debug/rustdoc"))
-             .env("RUSTDOC_REAL", self.rustdoc(compiler))
-             .env("RUSTC_FLAGS", self.rustc_flags(target).join(" "));
-
-        self.add_bootstrap_key(compiler, &mut cargo);
-
-        // Specify some various options for build scripts used throughout
-        // the build.
-        //
-        // FIXME: the guard against msvc shouldn't need to be here
-        if !target.contains("msvc") {
-            cargo.env(format!("CC_{}", target), self.cc(target))
-                 .env(format!("AR_{}", target), self.ar(target).unwrap()) // only msvc is None
-                 .env(format!("CFLAGS_{}", target), self.cflags(target).join(" "));
-        }
-
-        // If we're building for OSX, inform the compiler and the linker that
-        // we want to build a compiler runnable on 10.7
-        if target.contains("apple-darwin") {
-            cargo.env("MACOSX_DEPLOYMENT_TARGET", "10.7");
-        }
-
-        // Environment variables *required* needed throughout the build
-        //
-        // FIXME: should update code to not require this env var
-        cargo.env("CFG_COMPILER_HOST_TRIPLE", target);
-
-        if self.config.verbose || self.flags.verbose {
-            cargo.arg("-v");
-        }
-        if self.config.rust_optimize {
-            cargo.arg("--release");
-        }
-        return cargo
-    }
-
-    /// Get a path to the compiler specified.
-    fn compiler_path(&self, compiler: &Compiler) -> PathBuf {
-        if compiler.is_snapshot(self) {
-            self.rustc.clone()
-        } else {
-            self.sysroot(compiler).join("bin").join(exe("rustc", compiler.host))
-        }
-    }
-
-    /// Get the specified tool built by the specified compiler
-    fn tool(&self, compiler: &Compiler, tool: &str) -> PathBuf {
-        self.cargo_out(compiler, Mode::Tool, compiler.host)
-            .join(exe(tool, compiler.host))
-    }
-
-    /// Get the `rustdoc` executable next to the specified compiler
-    fn rustdoc(&self, compiler: &Compiler) -> PathBuf {
-        let mut rustdoc = self.compiler_path(compiler);
-        rustdoc.pop();
-        rustdoc.push(exe("rustdoc", compiler.host));
-        return rustdoc
-    }
-
-    /// Get a `Command` which is ready to run `tool` in `stage` built for
-    /// `host`.
-    fn tool_cmd(&self, compiler: &Compiler, tool: &str) -> Command {
-        let mut cmd = Command::new(self.tool(&compiler, tool));
-        let host = compiler.host;
-        let paths = vec![
-            self.cargo_out(compiler, Mode::Libstd, host).join("deps"),
-            self.cargo_out(compiler, Mode::Libtest, host).join("deps"),
-            self.cargo_out(compiler, Mode::Librustc, host).join("deps"),
-            self.cargo_out(compiler, Mode::Tool, host).join("deps"),
-        ];
-        add_lib_path(paths, &mut cmd);
-        return cmd
-    }
-
-    /// Get the space-separated set of activated features for the standard
-    /// library.
-    fn std_features(&self) -> String {
-        let mut features = String::new();
-        if self.config.debug_jemalloc {
-            features.push_str(" debug-jemalloc");
-        }
-        if self.config.use_jemalloc {
-            features.push_str(" jemalloc");
-        }
-        return features
-    }
-
-    /// Get the space-separated set of activated features for the compiler.
-    fn rustc_features(&self) -> String {
-        let mut features = String::new();
-        if self.config.use_jemalloc {
-            features.push_str(" jemalloc");
-        }
-        return features
-    }
-
-    /// Component directory that Cargo will produce output into (e.g.
-    /// release/debug)
-    fn cargo_dir(&self) -> &'static str {
-        if self.config.rust_optimize {"release"} else {"debug"}
-    }
-
-    /// Returns the sysroot for the `compiler` specified that *this build system
-    /// generates*.
-    ///
-    /// That is, the sysroot for the stage0 compiler is not what the compiler
-    /// thinks it is by default, but it's the same as the default for stages
-    /// 1-3.
-    fn sysroot(&self, compiler: &Compiler) -> PathBuf {
-        if compiler.stage == 0 {
-            self.out.join(compiler.host).join("stage0-sysroot")
-        } else {
-            self.out.join(compiler.host).join(format!("stage{}", compiler.stage))
-        }
-    }
-
-    /// Returns the libdir where the standard library and other artifacts are
-    /// found for a compiler's sysroot.
-    fn sysroot_libdir(&self, compiler: &Compiler, target: &str) -> PathBuf {
-        self.sysroot(compiler).join("lib").join("rustlib")
-            .join(target).join("lib")
-    }
-
-    /// Returns the root directory for all output generated in a particular
-    /// stage when running with a particular host compiler.
-    ///
-    /// The mode indicates what the root directory is for.
-    fn stage_out(&self, compiler: &Compiler, mode: Mode) -> PathBuf {
-        let suffix = match mode {
-            Mode::Libstd => "-std",
-            Mode::Libtest => "-test",
-            Mode::Tool => "-tools",
-            Mode::Librustc => "-rustc",
-        };
-        self.out.join(compiler.host)
-                .join(format!("stage{}{}", compiler.stage, suffix))
-    }
-
-    /// Returns the root output directory for all Cargo output in a given stage,
-    /// running a particular comipler, wehther or not we're building the
-    /// standard library, and targeting the specified architecture.
-    fn cargo_out(&self,
-                 compiler: &Compiler,
-                 mode: Mode,
-                 target: &str) -> PathBuf {
-        self.stage_out(compiler, mode).join(target).join(self.cargo_dir())
-    }
-
-    /// Root output directory for LLVM compiled for `target`
-    ///
-    /// Note that if LLVM is configured externally then the directory returned
-    /// will likely be empty.
-    fn llvm_out(&self, target: &str) -> PathBuf {
-        self.out.join(target).join("llvm")
-    }
-
-    /// Returns the path to `llvm-config` for the specified target.
-    ///
-    /// If a custom `llvm-config` was specified for target then that's returned
-    /// instead.
-    fn llvm_config(&self, target: &str) -> PathBuf {
-        let target_config = self.config.target_config.get(target);
-        if let Some(s) = target_config.and_then(|c| c.llvm_config.as_ref()) {
-            s.clone()
-        } else {
-            self.llvm_out(&self.config.build).join("bin")
-                .join(exe("llvm-config", target))
-        }
-    }
-
-    /// Returns the path to `FileCheck` binary for the specified target
-    fn llvm_filecheck(&self, target: &str) -> PathBuf {
-        let target_config = self.config.target_config.get(target);
-        if let Some(s) = target_config.and_then(|c| c.llvm_config.as_ref()) {
-            s.parent().unwrap().join(exe("FileCheck", target))
-        } else {
-            let base = self.llvm_out(&self.config.build).join("build");
-            let exe = exe("FileCheck", target);
-            if self.config.build.contains("msvc") {
-                base.join("Release/bin").join(exe)
-            } else {
-                base.join("bin").join(exe)
-            }
-        }
-    }
-
-    /// Root output directory for compiler-rt compiled for `target`
-    fn compiler_rt_out(&self, target: &str) -> PathBuf {
-        self.out.join(target).join("compiler-rt")
-    }
-
-    /// Root output directory for rust_test_helpers library compiled for
-    /// `target`
-    fn test_helpers_out(&self, target: &str) -> PathBuf {
-        self.out.join(target).join("rust-test-helpers")
-    }
-
-    /// Adds the compiler's directory of dynamic libraries to `cmd`'s dynamic
-    /// library lookup path.
-    fn add_rustc_lib_path(&self, compiler: &Compiler, cmd: &mut Command) {
-        // Windows doesn't need dylib path munging because the dlls for the
-        // compiler live next to the compiler and the system will find them
-        // automatically.
-        if cfg!(windows) {
-            return
-        }
-
-        add_lib_path(vec![self.rustc_libdir(compiler)], cmd);
-    }
-
-    /// Adds the compiler's bootstrap key to the environment of `cmd`.
-    fn add_bootstrap_key(&self, compiler: &Compiler, cmd: &mut Command) {
-        // In stage0 we're using a previously released stable compiler, so we
-        // use the stage0 bootstrap key. Otherwise we use our own build's
-        // bootstrap key.
-        let bootstrap_key = if compiler.is_snapshot(self) && !self.config.local_rebuild {
-            &self.bootstrap_key_stage0
-        } else {
-            &self.bootstrap_key
-        };
-        cmd.env("RUSTC_BOOTSTRAP_KEY", bootstrap_key);
-    }
-
-    /// Returns the compiler's libdir where it stores the dynamic libraries that
-    /// it itself links against.
-    ///
-    /// For example this returns `<sysroot>/lib` on Unix and `<sysroot>/bin` on
-    /// Windows.
-    fn rustc_libdir(&self, compiler: &Compiler) -> PathBuf {
-        if compiler.is_snapshot(self) {
-            self.rustc_snapshot_libdir()
-        } else {
-            self.sysroot(compiler).join(libdir(compiler.host))
-        }
-    }
-
-    /// Returns the libdir of the snapshot compiler.
-    fn rustc_snapshot_libdir(&self) -> PathBuf {
-        self.rustc.parent().unwrap().parent().unwrap()
-            .join(libdir(&self.config.build))
-    }
-
-    /// Runs a command, printing out nice contextual information if it fails.
-    fn run(&self, cmd: &mut Command) {
-        self.verbose(&format!("running: {:?}", cmd));
-        run_silent(cmd)
-    }
-
-    /// Prints a message if this build is configured in verbose mode.
-    fn verbose(&self, msg: &str) {
-        if self.flags.verbose || self.config.verbose {
-            println!("{}", msg);
-        }
-    }
-
-    /// Returns the number of parallel jobs that have been configured for this
-    /// build.
-    fn jobs(&self) -> u32 {
-        self.flags.jobs.unwrap_or(num_cpus::get() as u32)
-    }
-
-    /// Returns the path to the C compiler for the target specified.
-    fn cc(&self, target: &str) -> &Path {
-        self.cc[target].0.path()
-    }
-
-    /// Returns a list of flags to pass to the C compiler for the target
-    /// specified.
-    fn cflags(&self, target: &str) -> Vec<String> {
-        // Filter out -O and /O (the optimization flags) that we picked up from
-        // gcc-rs because the build scripts will determine that for themselves.
-        let mut base = self.cc[target].0.args().iter()
-                           .map(|s| s.to_string_lossy().into_owned())
-                           .filter(|s| !s.starts_with("-O") && !s.starts_with("/O"))
-                           .collect::<Vec<_>>();
-
-        // If we're compiling on OSX then we add a few unconditional flags
-        // indicating that we want libc++ (more filled out than libstdc++) and
-        // we want to compile for 10.7. This way we can ensure that
-        // LLVM/jemalloc/etc are all properly compiled.
-        if target.contains("apple-darwin") {
-            base.push("-stdlib=libc++".into());
-            base.push("-mmacosx-version-min=10.7".into());
-        }
-        return base
-    }
-
-    /// Returns the path to the `ar` archive utility for the target specified.
-    fn ar(&self, target: &str) -> Option<&Path> {
-        self.cc[target].1.as_ref().map(|p| &**p)
-    }
-
-    /// Returns the path to the C++ compiler for the target specified, may panic
-    /// if no C++ compiler was configured for the target.
-    fn cxx(&self, target: &str) -> &Path {
-        self.cxx[target].path()
-    }
-
-    /// Returns flags to pass to the compiler to generate code for `target`.
-    fn rustc_flags(&self, target: &str) -> Vec<String> {
-        // New flags should be added here with great caution!
-        //
-        // It's quite unfortunate to **require** flags to generate code for a
-        // target, so it should only be passed here if absolutely necessary!
-        // Most default configuration should be done through target specs rather
-        // than an entry here.
-
-        let mut base = Vec::new();
-        if target != self.config.build && !target.contains("msvc") {
-            base.push(format!("-Clinker={}", self.cc(target).display()));
-        }
-        return base
-    }
-}
-
-impl<'a> Compiler<'a> {
-    /// Creates a new complier for the specified stage/host
-    fn new(stage: u32, host: &'a str) -> Compiler<'a> {
-        Compiler { stage: stage, host: host }
-    }
-
-    /// Returns whether this is a snapshot compiler for `build`'s configuration
-    fn is_snapshot(&self, build: &Build) -> bool {
-        self.stage == 0 && self.host == build.config.build
-    }
-}
diff --git a/src/bootstrap/build/native.rs b/src/bootstrap/build/native.rs
deleted file mode 100644 (file)
index f6030cf..0000000
+++ /dev/null
@@ -1,238 +0,0 @@
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Compilation of native dependencies like LLVM.
-//!
-//! Native projects like LLVM unfortunately aren't suited just yet for
-//! compilation in build scripts that Cargo has. This is because thie
-//! compilation takes a *very* long time but also because we don't want to
-//! compile LLVM 3 times as part of a normal bootstrap (we want it cached).
-//!
-//! LLVM and compiler-rt are essentially just wired up to everything else to
-//! ensure that they're always in place if needed.
-
-use std::path::Path;
-use std::process::Command;
-use std::fs::{self, File};
-
-use build_helper::output;
-use cmake;
-use gcc;
-
-use build::Build;
-use build::util::{staticlib, up_to_date};
-
-/// Compile LLVM for `target`.
-pub fn llvm(build: &Build, target: &str) {
-    // If we're using a custom LLVM bail out here, but we can only use a
-    // custom LLVM for the build triple.
-    if let Some(config) = build.config.target_config.get(target) {
-        if let Some(ref s) = config.llvm_config {
-            return check_llvm_version(build, s);
-        }
-    }
-
-    // If the cleaning trigger is newer than our built artifacts (or if the
-    // artifacts are missing) then we keep going, otherwise we bail out.
-    let dst = build.llvm_out(target);
-    let stamp = build.src.join("src/rustllvm/llvm-auto-clean-trigger");
-    let done_stamp = dst.join("llvm-finished-building");
-    build.clear_if_dirty(&dst, &stamp);
-    if fs::metadata(&done_stamp).is_ok() {
-        return
-    }
-
-    println!("Building LLVM for {}", target);
-
-    let _ = fs::remove_dir_all(&dst.join("build"));
-    t!(fs::create_dir_all(&dst.join("build")));
-    let assertions = if build.config.llvm_assertions {"ON"} else {"OFF"};
-
-    // http://llvm.org/docs/CMake.html
-    let mut cfg = cmake::Config::new(build.src.join("src/llvm"));
-    if build.config.ninja {
-        cfg.generator("Ninja");
-    }
-    cfg.target(target)
-       .host(&build.config.build)
-       .out_dir(&dst)
-       .profile(if build.config.llvm_optimize {"Release"} else {"Debug"})
-       .define("LLVM_ENABLE_ASSERTIONS", assertions)
-       .define("LLVM_TARGETS_TO_BUILD", "X86;ARM;AArch64;Mips;PowerPC")
-       .define("LLVM_INCLUDE_EXAMPLES", "OFF")
-       .define("LLVM_INCLUDE_TESTS", "OFF")
-       .define("LLVM_INCLUDE_DOCS", "OFF")
-       .define("LLVM_ENABLE_ZLIB", "OFF")
-       .define("WITH_POLLY", "OFF")
-       .define("LLVM_ENABLE_TERMINFO", "OFF")
-       .define("LLVM_ENABLE_LIBEDIT", "OFF")
-       .define("LLVM_PARALLEL_COMPILE_JOBS", build.jobs().to_string());
-
-    if target.starts_with("i686") {
-        cfg.define("LLVM_BUILD_32_BITS", "ON");
-    }
-
-    // http://llvm.org/docs/HowToCrossCompileLLVM.html
-    if target != build.config.build {
-        // FIXME: if the llvm root for the build triple is overridden then we
-        //        should use llvm-tblgen from there, also should verify that it
-        //        actually exists most of the time in normal installs of LLVM.
-        let host = build.llvm_out(&build.config.build).join("bin/llvm-tblgen");
-        cfg.define("CMAKE_CROSSCOMPILING", "True")
-           .define("LLVM_TARGET_ARCH", target.split('-').next().unwrap())
-           .define("LLVM_TABLEGEN", &host)
-           .define("LLVM_DEFAULT_TARGET_TRIPLE", target);
-    }
-
-    // MSVC handles compiler business itself
-    if !target.contains("msvc") {
-        if build.config.ccache {
-           cfg.define("CMAKE_C_COMPILER", "ccache")
-              .define("CMAKE_C_COMPILER_ARG1", build.cc(target))
-              .define("CMAKE_CXX_COMPILER", "ccache")
-              .define("CMAKE_CXX_COMPILER_ARG1", build.cxx(target));
-        } else {
-           cfg.define("CMAKE_C_COMPILER", build.cc(target))
-              .define("CMAKE_CXX_COMPILER", build.cxx(target));
-        }
-        cfg.build_arg("-j").build_arg(build.jobs().to_string());
-
-        cfg.define("CMAKE_C_FLAGS", build.cflags(target).join(" "));
-        cfg.define("CMAKE_CXX_FLAGS", build.cflags(target).join(" "));
-    }
-
-    // FIXME: we don't actually need to build all LLVM tools and all LLVM
-    //        libraries here, e.g. we just want a few components and a few
-    //        tools. Figure out how to filter them down and only build the right
-    //        tools and libs on all platforms.
-    cfg.build();
-
-    t!(File::create(&done_stamp));
-}
-
-fn check_llvm_version(build: &Build, llvm_config: &Path) {
-    if !build.config.llvm_version_check {
-        return
-    }
-
-    let mut cmd = Command::new(llvm_config);
-    let version = output(cmd.arg("--version"));
-    if version.starts_with("3.5") || version.starts_with("3.6") ||
-       version.starts_with("3.7") {
-        return
-    }
-    panic!("\n\nbad LLVM version: {}, need >=3.5\n\n", version)
-}
-
-/// Compiles the `compiler-rt` library, or at least the builtins part of it.
-///
-/// This uses the CMake build system and an existing LLVM build directory to
-/// compile the project.
-pub fn compiler_rt(build: &Build, target: &str) {
-    let dst = build.compiler_rt_out(target);
-    let arch = target.split('-').next().unwrap();
-    let mode = if build.config.rust_optimize {"Release"} else {"Debug"};
-
-    let build_llvm_config = build.llvm_config(&build.config.build);
-    let mut cfg = cmake::Config::new(build.src.join("src/compiler-rt"));
-    cfg.target(target)
-       .host(&build.config.build)
-       .out_dir(&dst)
-       .profile(mode)
-       .define("LLVM_CONFIG_PATH", build_llvm_config)
-       .define("COMPILER_RT_DEFAULT_TARGET_TRIPLE", target)
-       .define("COMPILER_RT_BUILD_SANITIZERS", "OFF")
-       .define("COMPILER_RT_BUILD_EMUTLS", "OFF")
-       // inform about c/c++ compilers, the c++ compiler isn't actually used but
-       // it's needed to get the initial configure to work on all platforms.
-       .define("CMAKE_C_COMPILER", build.cc(target))
-       .define("CMAKE_CXX_COMPILER", build.cc(target));
-
-    let (dir, build_target, libname) = if target.contains("linux") ||
-                                          target.contains("freebsd") ||
-                                          target.contains("netbsd") {
-        let os_extra = if target.contains("android") && target.contains("arm") {
-            "-android"
-        } else {
-            ""
-        };
-        let builtins_arch = match arch {
-            "i586" => "i386",
-            "arm" | "armv7" if target.contains("android") => "armhf",
-            "arm" if target.contains("eabihf") => "armhf",
-            _ => arch,
-        };
-        let target = format!("clang_rt.builtins-{}", builtins_arch);
-        ("linux".to_string(),
-         target.clone(),
-         format!("{}{}", target, os_extra))
-    } else if target.contains("apple-darwin") {
-        let builtins_arch = match arch {
-            "i686" => "i386",
-            _ => arch,
-        };
-        let target = format!("clang_rt.builtins_{}_osx", builtins_arch);
-        ("builtins".to_string(), target.clone(), target)
-    } else if target.contains("apple-ios") {
-        cfg.define("COMPILER_RT_ENABLE_IOS", "ON");
-        let target = match arch {
-            "armv7s" => "hard_pic_armv7em_macho_embedded".to_string(),
-            "aarch64" => "builtins_arm64_ios".to_string(),
-            _ => format!("hard_pic_{}_macho_embedded", arch),
-        };
-        ("builtins".to_string(), target.clone(), target)
-    } else if target.contains("windows-gnu") {
-        let target = format!("clang_rt.builtins-{}", arch);
-        ("windows".to_string(), target.clone(), target)
-    } else if target.contains("windows-msvc") {
-        let builtins_arch = match arch {
-            "i586" | "i686" => "i386",
-            _ => arch,
-        };
-        (format!("windows/{}", mode),
-         "lib/builtins/builtins".to_string(),
-         format!("clang_rt.builtins-{}", builtins_arch))
-    } else {
-        panic!("can't get os from target: {}", target)
-    };
-    let output = dst.join("build/lib").join(dir)
-                    .join(staticlib(&libname, target));
-    build.compiler_rt_built.borrow_mut().insert(target.to_string(),
-                                                output.clone());
-    if fs::metadata(&output).is_ok() {
-        return
-    }
-    let _ = fs::remove_dir_all(&dst);
-    t!(fs::create_dir_all(&dst));
-    cfg.build_target(&build_target);
-    cfg.build();
-}
-
-/// Compiles the `rust_test_helpers.c` library which we used in various
-/// `run-pass` test suites for ABI testing.
-pub fn test_helpers(build: &Build, target: &str) {
-    let dst = build.test_helpers_out(target);
-    let src = build.src.join("src/rt/rust_test_helpers.c");
-    if up_to_date(&src, &dst.join("librust_test_helpers.a")) {
-        return
-    }
-
-    println!("Building test helpers");
-    t!(fs::create_dir_all(&dst));
-    let mut cfg = gcc::Config::new();
-    cfg.cargo_metadata(false)
-       .out_dir(&dst)
-       .target(target)
-       .host(&build.config.build)
-       .opt_level(0)
-       .debug(false)
-       .file(build.src.join("src/rt/rust_test_helpers.c"))
-       .compile("librust_test_helpers.a");
-}
diff --git a/src/bootstrap/build/sanity.rs b/src/bootstrap/build/sanity.rs
deleted file mode 100644 (file)
index 5eced00..0000000
+++ /dev/null
@@ -1,172 +0,0 @@
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Sanity checking performed by rustbuild before actually executing anything.
-//!
-//! This module contains the implementation of ensuring that the build
-//! environment looks reasonable before progressing. This will verify that
-//! various programs like git and python exist, along with ensuring that all C
-//! compilers for cross-compiling are found.
-//!
-//! In theory if we get past this phase it's a bug if a build fails, but in
-//! practice that's likely not true!
-
-use std::collections::HashSet;
-use std::env;
-use std::ffi::{OsStr, OsString};
-use std::fs;
-use std::process::Command;
-
-use build_helper::output;
-
-use build::Build;
-
-pub fn check(build: &mut Build) {
-    let mut checked = HashSet::new();
-    let path = env::var_os("PATH").unwrap_or(OsString::new());
-    let mut need_cmd = |cmd: &OsStr| {
-        if !checked.insert(cmd.to_owned()) {
-            return
-        }
-        for path in env::split_paths(&path).map(|p| p.join(cmd)) {
-            if fs::metadata(&path).is_ok() ||
-               fs::metadata(path.with_extension("exe")).is_ok() {
-                return
-            }
-        }
-        panic!("\n\ncouldn't find required command: {:?}\n\n", cmd);
-    };
-
-    // If we've got a git directory we're gona need git to update
-    // submodules and learn about various other aspects.
-    if fs::metadata(build.src.join(".git")).is_ok() {
-        need_cmd("git".as_ref());
-    }
-
-    // We need cmake, but only if we're actually building LLVM
-    for host in build.config.host.iter() {
-        if let Some(config) = build.config.target_config.get(host) {
-            if config.llvm_config.is_some() {
-                continue
-            }
-        }
-        need_cmd("cmake".as_ref());
-        if build.config.ninja {
-            need_cmd("ninja".as_ref())
-        }
-        break
-    }
-
-    need_cmd("python".as_ref());
-
-    // We're gonna build some custom C code here and there, host triples
-    // also build some C++ shims for LLVM so we need a C++ compiler.
-    for target in build.config.target.iter() {
-        need_cmd(build.cc(target).as_ref());
-        if let Some(ar) = build.ar(target) {
-            need_cmd(ar.as_ref());
-        }
-    }
-    for host in build.config.host.iter() {
-        need_cmd(build.cxx(host).as_ref());
-    }
-
-    // Externally configured LLVM requires FileCheck to exist
-    let filecheck = build.llvm_filecheck(&build.config.build);
-    if !filecheck.starts_with(&build.out) && !filecheck.exists() {
-        panic!("filecheck executable {:?} does not exist", filecheck);
-    }
-
-    for target in build.config.target.iter() {
-        // Either can't build or don't want to run jemalloc on these targets
-        if target.contains("rumprun") ||
-           target.contains("bitrig") ||
-           target.contains("openbsd") ||
-           target.contains("msvc") {
-            build.config.use_jemalloc = false;
-        }
-
-        // Can't compile for iOS unless we're on OSX
-        if target.contains("apple-ios") &&
-           !build.config.build.contains("apple-darwin") {
-            panic!("the iOS target is only supported on OSX");
-        }
-
-        // Make sure musl-root is valid if specified
-        if target.contains("musl") && (target.contains("x86_64") || target.contains("i686")) {
-            match build.config.musl_root {
-                Some(ref root) => {
-                    if fs::metadata(root.join("lib/libc.a")).is_err() {
-                        panic!("couldn't find libc.a in musl dir: {}",
-                               root.join("lib").display());
-                    }
-                    if fs::metadata(root.join("lib/libunwind.a")).is_err() {
-                        panic!("couldn't find libunwind.a in musl dir: {}",
-                               root.join("lib").display());
-                    }
-                }
-                None => {
-                    panic!("when targeting MUSL the build.musl-root option \
-                            must be specified in config.toml")
-                }
-            }
-        }
-
-        if target.contains("msvc") {
-            // There are three builds of cmake on windows: MSVC, MinGW, and
-            // Cygwin. The Cygwin build does not have generators for Visual
-            // Studio, so detect that here and error.
-            let out = output(Command::new("cmake").arg("--help"));
-            if !out.contains("Visual Studio") {
-                panic!("
-cmake does not support Visual Studio generators.
-
-This is likely due to it being an msys/cygwin build of cmake,
-rather than the required windows version, built using MinGW
-or Visual Studio.
-
-If you are building under msys2 try installing the mingw-w64-x86_64-cmake
-package instead of cmake:
-
-$ pacman -R cmake && pacman -S mingw-w64-x86_64-cmake
-");
-            }
-        }
-
-        if target.contains("arm-linux-android") {
-            need_cmd("adb".as_ref());
-        }
-    }
-
-    for host in build.flags.host.iter() {
-        if !build.config.host.contains(host) {
-            panic!("specified host `{}` is not in the ./configure list", host);
-        }
-    }
-    for target in build.flags.target.iter() {
-        if !build.config.target.contains(target) {
-            panic!("specified target `{}` is not in the ./configure list",
-                   target);
-        }
-    }
-
-    let run = |cmd: &mut Command| {
-        cmd.output().map(|output| {
-            String::from_utf8_lossy(&output.stdout)
-                   .lines().next().unwrap()
-                   .to_string()
-        })
-    };
-    build.gdb_version = run(Command::new("gdb").arg("--version")).ok();
-    build.lldb_version = run(Command::new("lldb").arg("--version")).ok();
-    if build.lldb_version.is_some() {
-        build.lldb_python_dir = run(Command::new("lldb").arg("-P")).ok();
-    }
-}
diff --git a/src/bootstrap/build/step.rs b/src/bootstrap/build/step.rs
deleted file mode 100644 (file)
index 7cbbd67..0000000
+++ /dev/null
@@ -1,590 +0,0 @@
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Major workhorse of rustbuild, definition and dependencies between stages of
-//! the copmile.
-//!
-//! The primary purpose of this module is to define the various `Step`s of
-//! execution of the build. Each `Step` has a corresponding `Source` indicating
-//! what it's actually doing along with a number of dependencies which must be
-//! executed first.
-//!
-//! This module will take the CLI as input and calculate the steps required for
-//! the build requested, ensuring that all intermediate pieces are in place.
-//! Essentially this module is a `make`-replacement, but not as good.
-
-use std::collections::HashSet;
-
-use build::{Build, Compiler};
-
-#[derive(Hash, Eq, PartialEq, Clone, Debug)]
-pub struct Step<'a> {
-    pub src: Source<'a>,
-    pub target: &'a str,
-}
-
-/// Macro used to iterate over all targets that are recognized by the build
-/// system.
-///
-/// Whenever a new step is added it will involve adding an entry here, updating
-/// the dependencies section below, and then adding an implementation of the
-/// step in `build/mod.rs`.
-///
-/// This macro takes another macro as an argument and then calls that macro with
-/// all steps that the build system knows about.
-macro_rules! targets {
-    ($m:ident) => {
-        $m! {
-            // Step representing building the stageN compiler. This is just the
-            // compiler executable itself, not any of the support libraries
-            (rustc, Rustc { stage: u32 }),
-
-            // Steps for the two main cargo builds. These are parameterized over
-            // the compiler which is producing the artifact.
-            (libstd, Libstd { compiler: Compiler<'a> }),
-            (libtest, Libtest { compiler: Compiler<'a> }),
-            (librustc, Librustc { compiler: Compiler<'a> }),
-
-            // Links the target produced by the compiler provided into the
-            // host's directory also provided.
-            (libstd_link, LibstdLink {
-                compiler: Compiler<'a>,
-                host: &'a str
-            }),
-            (libtest_link, LibtestLink {
-                compiler: Compiler<'a>,
-                host: &'a str
-            }),
-            (librustc_link, LibrustcLink {
-                compiler: Compiler<'a>,
-                host: &'a str
-            }),
-
-            // Various tools that we can build as part of the build.
-            (tool_linkchecker, ToolLinkchecker { stage: u32 }),
-            (tool_rustbook, ToolRustbook { stage: u32 }),
-            (tool_error_index, ToolErrorIndex { stage: u32 }),
-            (tool_cargotest, ToolCargoTest { stage: u32 }),
-            (tool_tidy, ToolTidy { stage: u32 }),
-            (tool_compiletest, ToolCompiletest { stage: u32 }),
-
-            // Steps for long-running native builds. Ideally these wouldn't
-            // actually exist and would be part of build scripts, but for now
-            // these are here.
-            //
-            // There aren't really any parameters to this, but empty structs
-            // with braces are unstable so we just pick something that works.
-            (llvm, Llvm { _dummy: () }),
-            (compiler_rt, CompilerRt { _dummy: () }),
-            (test_helpers, TestHelpers { _dummy: () }),
-            (debugger_scripts, DebuggerScripts { stage: u32 }),
-
-            // Steps for various pieces of documentation that we can generate,
-            // the 'doc' step is just a pseudo target to depend on a bunch of
-            // others.
-            (doc, Doc { stage: u32 }),
-            (doc_book, DocBook { stage: u32 }),
-            (doc_nomicon, DocNomicon { stage: u32 }),
-            (doc_style, DocStyle { stage: u32 }),
-            (doc_standalone, DocStandalone { stage: u32 }),
-            (doc_std, DocStd { stage: u32 }),
-            (doc_test, DocTest { stage: u32 }),
-            (doc_rustc, DocRustc { stage: u32 }),
-            (doc_error_index, DocErrorIndex { stage: u32 }),
-
-            // Steps for running tests. The 'check' target is just a pseudo
-            // target to depend on a bunch of others.
-            (check, Check { stage: u32, compiler: Compiler<'a> }),
-            (check_target, CheckTarget { stage: u32, compiler: Compiler<'a> }),
-            (check_linkcheck, CheckLinkcheck { stage: u32 }),
-            (check_cargotest, CheckCargoTest { stage: u32 }),
-            (check_tidy, CheckTidy { stage: u32 }),
-            (check_rpass, CheckRPass { compiler: Compiler<'a> }),
-            (check_rpass_full, CheckRPassFull { compiler: Compiler<'a> }),
-            (check_rpass_valgrind, CheckRPassValgrind { compiler: Compiler<'a> }),
-            (check_rfail, CheckRFail { compiler: Compiler<'a> }),
-            (check_rfail_full, CheckRFailFull { compiler: Compiler<'a> }),
-            (check_cfail, CheckCFail { compiler: Compiler<'a> }),
-            (check_cfail_full, CheckCFailFull { compiler: Compiler<'a> }),
-            (check_pfail, CheckPFail { compiler: Compiler<'a> }),
-            (check_pretty, CheckPretty { compiler: Compiler<'a> }),
-            (check_pretty_rpass, CheckPrettyRPass { compiler: Compiler<'a> }),
-            (check_pretty_rpass_full, CheckPrettyRPassFull { compiler: Compiler<'a> }),
-            (check_pretty_rfail, CheckPrettyRFail { compiler: Compiler<'a> }),
-            (check_pretty_rfail_full, CheckPrettyRFailFull { compiler: Compiler<'a> }),
-            (check_pretty_rpass_valgrind, CheckPrettyRPassValgrind { compiler: Compiler<'a> }),
-            (check_codegen, CheckCodegen { compiler: Compiler<'a> }),
-            (check_codegen_units, CheckCodegenUnits { compiler: Compiler<'a> }),
-            (check_incremental, CheckIncremental { compiler: Compiler<'a> }),
-            (check_ui, CheckUi { compiler: Compiler<'a> }),
-            (check_debuginfo, CheckDebuginfo { compiler: Compiler<'a> }),
-            (check_rustdoc, CheckRustdoc { compiler: Compiler<'a> }),
-            (check_docs, CheckDocs { compiler: Compiler<'a> }),
-            (check_error_index, CheckErrorIndex { compiler: Compiler<'a> }),
-            (check_rmake, CheckRMake { compiler: Compiler<'a> }),
-            (check_crate_std, CheckCrateStd { compiler: Compiler<'a> }),
-            (check_crate_test, CheckCrateTest { compiler: Compiler<'a> }),
-            (check_crate_rustc, CheckCrateRustc { compiler: Compiler<'a> }),
-
-            // Distribution targets, creating tarballs
-            (dist, Dist { stage: u32 }),
-            (dist_docs, DistDocs { stage: u32 }),
-            (dist_mingw, DistMingw { _dummy: () }),
-            (dist_rustc, DistRustc { stage: u32 }),
-            (dist_std, DistStd { compiler: Compiler<'a> }),
-
-            // Misc targets
-            (android_copy_libs, AndroidCopyLibs { compiler: Compiler<'a> }),
-        }
-    }
-}
-
-// Define the `Source` enum by iterating over all the steps and peeling out just
-// the types that we want to define.
-
-macro_rules! item { ($a:item) => ($a) }
-
-macro_rules! define_source {
-    ($(($short:ident, $name:ident { $($args:tt)* }),)*) => {
-        item! {
-            #[derive(Hash, Eq, PartialEq, Clone, Debug)]
-            pub enum Source<'a> {
-                $($name { $($args)* }),*
-            }
-        }
-    }
-}
-
-targets!(define_source);
-
-/// Calculate a list of all steps described by `build`.
-///
-/// This will inspect the flags passed in on the command line and use that to
-/// build up a list of steps to execute. These steps will then be transformed
-/// into a topologically sorted list which when executed left-to-right will
-/// correctly sequence the entire build.
-pub fn all(build: &Build) -> Vec<Step> {
-    let mut ret = Vec::new();
-    let mut all = HashSet::new();
-    for target in top_level(build) {
-        fill(build, &target, &mut ret, &mut all);
-    }
-    return ret;
-
-    fn fill<'a>(build: &'a Build,
-                target: &Step<'a>,
-                ret: &mut Vec<Step<'a>>,
-                set: &mut HashSet<Step<'a>>) {
-        if set.insert(target.clone()) {
-            for dep in target.deps(build) {
-                fill(build, &dep, ret, set);
-            }
-            ret.push(target.clone());
-        }
-    }
-}
-
-/// Determines what top-level targets are requested as part of this build,
-/// returning them as a list.
-fn top_level(build: &Build) -> Vec<Step> {
-    let mut targets = Vec::new();
-    let stage = build.flags.stage.unwrap_or(2);
-
-    let host = Step {
-        src: Source::Llvm { _dummy: () },
-        target: build.flags.host.iter().next()
-                     .unwrap_or(&build.config.build),
-    };
-    let target = Step {
-        src: Source::Llvm { _dummy: () },
-        target: build.flags.target.iter().next().map(|x| &x[..])
-                     .unwrap_or(host.target)
-    };
-
-    // First, try to find steps on the command line.
-    add_steps(build, stage, &host, &target, &mut targets);
-
-    // If none are specified, then build everything.
-    if targets.len() == 0 {
-        let t = Step {
-            src: Source::Llvm { _dummy: () },
-            target: &build.config.build,
-        };
-        if build.config.docs {
-          targets.push(t.doc(stage));
-        }
-        for host in build.config.host.iter() {
-            if !build.flags.host.contains(host) {
-                continue
-            }
-            let host = t.target(host);
-            if host.target == build.config.build {
-                targets.push(host.librustc(host.compiler(stage)));
-            } else {
-                targets.push(host.librustc_link(t.compiler(stage), host.target));
-            }
-            for target in build.config.target.iter() {
-                if !build.flags.target.contains(target) {
-                    continue
-                }
-
-                if host.target == build.config.build {
-                    targets.push(host.target(target)
-                                     .libtest(host.compiler(stage)));
-                } else {
-                    targets.push(host.target(target)
-                                     .libtest_link(t.compiler(stage), host.target));
-                }
-            }
-        }
-    }
-
-    return targets
-
-}
-
-fn add_steps<'a>(build: &'a Build,
-                 stage: u32,
-                 host: &Step<'a>,
-                 target: &Step<'a>,
-                 targets: &mut Vec<Step<'a>>) {
-    struct Context<'a> {
-        stage: u32,
-        compiler: Compiler<'a>,
-        _dummy: (),
-        host: &'a str,
-    }
-    for step in build.flags.step.iter() {
-
-        // The macro below insists on hygienic access to all local variables, so
-        // we shove them all in a struct and subvert hygiene by accessing struct
-        // fields instead,
-        let cx = Context {
-            stage: stage,
-            compiler: host.target(&build.config.build).compiler(stage),
-            _dummy: (),
-            host: host.target,
-        };
-        macro_rules! add_step {
-            ($(($short:ident, $name:ident { $($arg:ident: $t:ty),* }),)*) => ({$(
-                let name = stringify!($short).replace("_", "-");
-                if &step[..] == &name[..] {
-                    targets.push(target.$short($(cx.$arg),*));
-                    continue
-                }
-                drop(name);
-            )*})
-        }
-
-        targets!(add_step);
-
-        panic!("unknown step: {}", step);
-    }
-}
-
-macro_rules! constructors {
-    ($(($short:ident, $name:ident { $($arg:ident: $t:ty),* }),)*) => {$(
-        fn $short(&self, $($arg: $t),*) -> Step<'a> {
-            Step {
-                src: Source::$name { $($arg: $arg),* },
-                target: self.target,
-            }
-        }
-    )*}
-}
-
-impl<'a> Step<'a> {
-    fn compiler(&self, stage: u32) -> Compiler<'a> {
-        Compiler::new(stage, self.target)
-    }
-
-    fn target(&self, target: &'a str) -> Step<'a> {
-        Step { target: target, src: self.src.clone() }
-    }
-
-    // Define ergonomic constructors for each step defined above so they can be
-    // easily constructed.
-    targets!(constructors);
-
-    /// Mapping of all dependencies for rustbuild.
-    ///
-    /// This function receives a step, the build that we're building for, and
-    /// then returns a list of all the dependencies of that step.
-    pub fn deps(&self, build: &'a Build) -> Vec<Step<'a>> {
-        match self.src {
-            Source::Rustc { stage: 0 } => {
-                Vec::new()
-            }
-            Source::Rustc { stage } => {
-                let compiler = Compiler::new(stage - 1, &build.config.build);
-                vec![self.librustc(compiler)]
-            }
-            Source::Librustc { compiler } => {
-                vec![self.libtest(compiler), self.llvm(())]
-            }
-            Source::Libtest { compiler } => {
-                vec![self.libstd(compiler)]
-            }
-            Source::Libstd { compiler } => {
-                vec![self.compiler_rt(()),
-                     self.rustc(compiler.stage).target(compiler.host)]
-            }
-            Source::LibrustcLink { compiler, host } => {
-                vec![self.librustc(compiler),
-                     self.libtest_link(compiler, host)]
-            }
-            Source::LibtestLink { compiler, host } => {
-                vec![self.libtest(compiler), self.libstd_link(compiler, host)]
-            }
-            Source::LibstdLink { compiler, host } => {
-                vec![self.libstd(compiler),
-                     self.target(host).rustc(compiler.stage)]
-            }
-            Source::CompilerRt { _dummy } => {
-                vec![self.llvm(()).target(&build.config.build)]
-            }
-            Source::Llvm { _dummy } => Vec::new(),
-            Source::TestHelpers { _dummy } => Vec::new(),
-            Source::DebuggerScripts { stage: _ } => Vec::new(),
-
-            // Note that all doc targets depend on artifacts from the build
-            // architecture, not the target (which is where we're generating
-            // docs into).
-            Source::DocStd { stage } => {
-                let compiler = self.target(&build.config.build).compiler(stage);
-                vec![self.libstd(compiler)]
-            }
-            Source::DocTest { stage } => {
-                let compiler = self.target(&build.config.build).compiler(stage);
-                vec![self.libtest(compiler)]
-            }
-            Source::DocBook { stage } |
-            Source::DocNomicon { stage } |
-            Source::DocStyle { stage } => {
-                vec![self.target(&build.config.build).tool_rustbook(stage)]
-            }
-            Source::DocErrorIndex { stage } => {
-                vec![self.target(&build.config.build).tool_error_index(stage)]
-            }
-            Source::DocStandalone { stage } => {
-                vec![self.target(&build.config.build).rustc(stage)]
-            }
-            Source::DocRustc { stage } => {
-                vec![self.doc_test(stage)]
-            }
-            Source::Doc { stage } => {
-                vec![self.doc_book(stage), self.doc_nomicon(stage),
-                     self.doc_style(stage), self.doc_standalone(stage),
-                     self.doc_std(stage),
-                     self.doc_error_index(stage)]
-            }
-            Source::Check { stage, compiler } => {
-                // Check is just a pseudo step which means check all targets,
-                // so just depend on checking all targets.
-                build.config.target.iter().map(|t| {
-                    self.target(t).check_target(stage, compiler)
-                }).collect()
-            }
-            Source::CheckTarget { stage, compiler } => {
-                // CheckTarget here means run all possible test suites for this
-                // target. Most of the time, however, we can't actually run
-                // anything if we're not the build triple as we could be cross
-                // compiling.
-                //
-                // As a result, the base set of targets here is quite stripped
-                // down from the standard set of targets. These suites have
-                // their own internal logic to run in cross-compiled situations
-                // if they'll run at all. For example compiletest knows that
-                // when testing Android targets we ship artifacts to the
-                // emulator.
-                //
-                // When in doubt the rule of thumb for adding to this list is
-                // "should this test suite run on the android bot?"
-                let mut base = vec![
-                    self.check_rpass(compiler),
-                    self.check_rfail(compiler),
-                    self.check_crate_std(compiler),
-                    self.check_crate_test(compiler),
-                    self.check_debuginfo(compiler),
-                    self.dist(stage),
-                ];
-
-                // If we're testing the build triple, then we know we can
-                // actually run binaries and such, so we run all possible tests
-                // that we know about.
-                if self.target == build.config.build {
-                    base.extend(vec![
-                        // docs-related
-                        self.check_docs(compiler),
-                        self.check_error_index(compiler),
-                        self.check_rustdoc(compiler),
-
-                        // UI-related
-                        self.check_cfail(compiler),
-                        self.check_pfail(compiler),
-                        self.check_ui(compiler),
-
-                        // codegen-related
-                        self.check_incremental(compiler),
-                        self.check_codegen(compiler),
-                        self.check_codegen_units(compiler),
-
-                        // misc compiletest-test suites
-                        self.check_rpass_full(compiler),
-                        self.check_rfail_full(compiler),
-                        self.check_cfail_full(compiler),
-                        self.check_pretty_rpass_full(compiler),
-                        self.check_pretty_rfail_full(compiler),
-                        self.check_rpass_valgrind(compiler),
-                        self.check_rmake(compiler),
-
-                        // crates
-                        self.check_crate_rustc(compiler),
-
-                        // pretty
-                        self.check_pretty(compiler),
-                        self.check_pretty_rpass(compiler),
-                        self.check_pretty_rfail(compiler),
-                        self.check_pretty_rpass_valgrind(compiler),
-
-                        // misc
-                        self.check_linkcheck(stage),
-                        self.check_tidy(stage),
-                    ]);
-                }
-                return base
-            }
-            Source::CheckLinkcheck { stage } => {
-                vec![self.tool_linkchecker(stage), self.doc(stage)]
-            }
-            Source::CheckCargoTest { stage } => {
-                vec![self.tool_cargotest(stage),
-                     self.librustc(self.compiler(stage))]
-            }
-            Source::CheckTidy { stage } => {
-                vec![self.tool_tidy(stage)]
-            }
-            Source::CheckPrettyRPass { compiler } |
-            Source::CheckPrettyRFail { compiler } |
-            Source::CheckRFail { compiler } |
-            Source::CheckPFail { compiler } |
-            Source::CheckCodegen { compiler } |
-            Source::CheckCodegenUnits { compiler } |
-            Source::CheckIncremental { compiler } |
-            Source::CheckUi { compiler } |
-            Source::CheckRustdoc { compiler } |
-            Source::CheckPretty { compiler } |
-            Source::CheckCFail { compiler } |
-            Source::CheckRPassValgrind { compiler } |
-            Source::CheckRPass { compiler } => {
-                let mut base = vec![
-                    self.libtest(compiler),
-                    self.target(compiler.host).tool_compiletest(compiler.stage),
-                    self.test_helpers(()),
-                ];
-                if self.target.contains("android") {
-                    base.push(self.android_copy_libs(compiler));
-                }
-                base
-            }
-            Source::CheckDebuginfo { compiler } => {
-                vec![
-                    self.libtest(compiler),
-                    self.target(compiler.host).tool_compiletest(compiler.stage),
-                    self.test_helpers(()),
-                    self.debugger_scripts(compiler.stage),
-                ]
-            }
-            Source::CheckRPassFull { compiler } |
-            Source::CheckRFailFull { compiler } |
-            Source::CheckCFailFull { compiler } |
-            Source::CheckPrettyRPassFull { compiler } |
-            Source::CheckPrettyRFailFull { compiler } |
-            Source::CheckPrettyRPassValgrind { compiler } |
-            Source::CheckRMake { compiler } => {
-                vec![self.librustc(compiler),
-                     self.target(compiler.host).tool_compiletest(compiler.stage)]
-            }
-            Source::CheckDocs { compiler } => {
-                vec![self.libstd(compiler)]
-            }
-            Source::CheckErrorIndex { compiler } => {
-                vec![self.libstd(compiler),
-                     self.target(compiler.host).tool_error_index(compiler.stage)]
-            }
-            Source::CheckCrateStd { compiler } => {
-                vec![self.libtest(compiler)]
-            }
-            Source::CheckCrateTest { compiler } => {
-                vec![self.libtest(compiler)]
-            }
-            Source::CheckCrateRustc { compiler } => {
-                vec![self.libtest(compiler)]
-            }
-
-            Source::ToolLinkchecker { stage } |
-            Source::ToolTidy { stage } => {
-                vec![self.libstd(self.compiler(stage))]
-            }
-            Source::ToolErrorIndex { stage } |
-            Source::ToolRustbook { stage } => {
-                vec![self.librustc(self.compiler(stage))]
-            }
-            Source::ToolCargoTest { stage } => {
-                vec![self.libstd(self.compiler(stage))]
-            }
-            Source::ToolCompiletest { stage } => {
-                vec![self.libtest(self.compiler(stage))]
-            }
-
-            Source::DistDocs { stage } => vec![self.doc(stage)],
-            Source::DistMingw { _dummy: _ } => Vec::new(),
-            Source::DistRustc { stage } => {
-                vec![self.rustc(stage)]
-            }
-            Source::DistStd { compiler } => {
-                // We want to package up as many target libraries as possible
-                // for the `rust-std` package, so if this is a host target we
-                // depend on librustc and otherwise we just depend on libtest.
-                if build.config.host.iter().any(|t| t == self.target) {
-                    vec![self.librustc(compiler)]
-                } else {
-                    vec![self.libtest(compiler)]
-                }
-            }
-
-            Source::Dist { stage } => {
-                let mut base = Vec::new();
-
-                for host in build.config.host.iter() {
-                    let host = self.target(host);
-                    base.push(host.dist_rustc(stage));
-                    if host.target.contains("windows-gnu") {
-                        base.push(host.dist_mingw(()));
-                    }
-
-                    let compiler = self.compiler(stage);
-                    for target in build.config.target.iter() {
-                        let target = self.target(target);
-                        if build.config.docs {
-                            base.push(target.dist_docs(stage));
-                        }
-                        base.push(target.dist_std(compiler));
-                    }
-                }
-                return base
-            }
-
-            Source::AndroidCopyLibs { compiler } => {
-                vec![self.libtest(compiler)]
-            }
-        }
-    }
-}
diff --git a/src/bootstrap/build/util.rs b/src/bootstrap/build/util.rs
deleted file mode 100644 (file)
index 36ce064..0000000
+++ /dev/null
@@ -1,123 +0,0 @@
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Various utility functions used throughout rustbuild.
-//!
-//! Simple things like testing the various filesystem operations here and there,
-//! not a lot of interesting happenings here unfortunately.
-
-use std::env;
-use std::path::{Path, PathBuf};
-use std::fs;
-use std::process::Command;
-
-use bootstrap::{dylib_path, dylib_path_var};
-use filetime::FileTime;
-
-/// Returns the `name` as the filename of a static library for `target`.
-pub fn staticlib(name: &str, target: &str) -> String {
-    if target.contains("windows-msvc") {
-        format!("{}.lib", name)
-    } else {
-        format!("lib{}.a", name)
-    }
-}
-
-/// Returns the last-modified time for `path`, or zero if it doesn't exist.
-pub fn mtime(path: &Path) -> FileTime {
-    fs::metadata(path).map(|f| {
-        FileTime::from_last_modification_time(&f)
-    }).unwrap_or(FileTime::zero())
-}
-
-/// Copies a file from `src` to `dst`, attempting to use hard links and then
-/// falling back to an actually filesystem copy if necessary.
-pub fn copy(src: &Path, dst: &Path) {
-    let res = fs::hard_link(src, dst);
-    let res = res.or_else(|_| fs::copy(src, dst).map(|_| ()));
-    if let Err(e) = res {
-        panic!("failed to copy `{}` to `{}`: {}", src.display(),
-               dst.display(), e)
-    }
-}
-
-/// Copies the `src` directory recursively to `dst`. Both are assumed to exist
-/// when this function is called.
-pub fn cp_r(src: &Path, dst: &Path) {
-    for f in t!(fs::read_dir(src)) {
-        let f = t!(f);
-        let path = f.path();
-        let name = path.file_name().unwrap();
-        let dst = dst.join(name);
-        if t!(f.file_type()).is_dir() {
-            let _ = fs::remove_dir_all(&dst);
-            t!(fs::create_dir(&dst));
-            cp_r(&path, &dst);
-        } else {
-            let _ = fs::remove_file(&dst);
-            copy(&path, &dst);
-        }
-    }
-}
-
-/// Given an executable called `name`, return the filename for the
-/// executable for a particular target.
-pub fn exe(name: &str, target: &str) -> String {
-    if target.contains("windows") {
-        format!("{}.exe", name)
-    } else {
-        name.to_string()
-    }
-}
-
-/// Returns whether the file name given looks like a dynamic library.
-pub fn is_dylib(name: &str) -> bool {
-    name.ends_with(".dylib") || name.ends_with(".so") || name.ends_with(".dll")
-}
-
-/// Returns the corresponding relative library directory that the compiler's
-/// dylibs will be found in.
-pub fn libdir(target: &str) -> &'static str {
-    if target.contains("windows") {"bin"} else {"lib"}
-}
-
-/// Adds a list of lookup paths to `cmd`'s dynamic library lookup path.
-pub fn add_lib_path(path: Vec<PathBuf>, cmd: &mut Command) {
-    let mut list = dylib_path();
-    for path in path {
-        list.insert(0, path);
-    }
-    cmd.env(dylib_path_var(), t!(env::join_paths(list)));
-}
-
-/// Returns whether `dst` is up to date given that the file or files in `src`
-/// are used to generate it.
-///
-/// Uses last-modified time checks to verify this.
-pub fn up_to_date(src: &Path, dst: &Path) -> bool {
-    let threshold = mtime(dst);
-    let meta = t!(fs::metadata(src));
-    if meta.is_dir() {
-        dir_up_to_date(src, &threshold)
-    } else {
-        FileTime::from_last_modification_time(&meta) <= threshold
-    }
-}
-
-fn dir_up_to_date(src: &Path, threshold: &FileTime) -> bool {
-    t!(fs::read_dir(src)).map(|e| t!(e)).all(|e| {
-        let meta = t!(e.metadata());
-        if meta.is_dir() {
-            dir_up_to_date(&e.path(), threshold)
-        } else {
-            FileTime::from_last_modification_time(&meta) < *threshold
-        }
-    })
-}
diff --git a/src/bootstrap/cc.rs b/src/bootstrap/cc.rs
new file mode 100644 (file)
index 0000000..e2bde4a
--- /dev/null
@@ -0,0 +1,124 @@
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! C-compiler probing and detection.
+//!
+//! This module will fill out the `cc` and `cxx` maps of `Build` by looking for
+//! C and C++ compilers for each target configured. A compiler is found through
+//! a number of vectors (in order of precedence)
+//!
+//! 1. Configuration via `target.$target.cc` in `config.toml`.
+//! 2. Configuration via `target.$target.android-ndk` in `config.toml`, if
+//!    applicable
+//! 3. Special logic to probe on OpenBSD
+//! 4. The `CC_$target` environment variable.
+//! 5. The `CC` environment variable.
+//! 6. "cc"
+//!
+//! Some of this logic is implemented here, but much of it is farmed out to the
+//! `gcc` crate itself, so we end up having the same fallbacks as there.
+//! Similar logic is then used to find a C++ compiler, just some s/cc/c++/ is
+//! used.
+//!
+//! It is intended that after this module has run no C/C++ compiler will
+//! ever be probed for. Instead the compilers found here will be used for
+//! everything.
+
+use std::process::Command;
+
+use build_helper::{cc2ar, output};
+use gcc;
+
+use Build;
+use config::Target;
+
+pub fn find(build: &mut Build) {
+    // For all targets we're going to need a C compiler for building some shims
+    // and such as well as for being a linker for Rust code.
+    for target in build.config.target.iter() {
+        let mut cfg = gcc::Config::new();
+        cfg.cargo_metadata(false).opt_level(0).debug(false)
+           .target(target).host(&build.config.build);
+
+        let config = build.config.target_config.get(target);
+        if let Some(cc) = config.and_then(|c| c.cc.as_ref()) {
+            cfg.compiler(cc);
+        } else {
+            set_compiler(&mut cfg, "gcc", target, config);
+        }
+
+        let compiler = cfg.get_compiler();
+        let ar = cc2ar(compiler.path(), target);
+        build.verbose(&format!("CC_{} = {:?}", target, compiler.path()));
+        if let Some(ref ar) = ar {
+            build.verbose(&format!("AR_{} = {:?}", target, ar));
+        }
+        build.cc.insert(target.to_string(), (compiler, ar));
+    }
+
+    // For all host triples we need to find a C++ compiler as well
+    for host in build.config.host.iter() {
+        let mut cfg = gcc::Config::new();
+        cfg.cargo_metadata(false).opt_level(0).debug(false).cpp(true)
+           .target(host).host(&build.config.build);
+        let config = build.config.target_config.get(host);
+        if let Some(cxx) = config.and_then(|c| c.cxx.as_ref()) {
+            cfg.compiler(cxx);
+        } else {
+            set_compiler(&mut cfg, "g++", host, config);
+        }
+        let compiler = cfg.get_compiler();
+        build.verbose(&format!("CXX_{} = {:?}", host, compiler.path()));
+        build.cxx.insert(host.to_string(), compiler);
+    }
+}
+
+fn set_compiler(cfg: &mut gcc::Config,
+                gnu_compiler: &str,
+                target: &str,
+                config: Option<&Target>) {
+    match target {
+        // When compiling for android we may have the NDK configured in the
+        // config.toml in which case we look there. Otherwise the default
+        // compiler already takes into account the triple in question.
+        t if t.contains("android") => {
+            if let Some(ndk) = config.and_then(|c| c.ndk.as_ref()) {
+                let target = target.replace("armv7", "arm");
+                let compiler = format!("{}-{}", target, gnu_compiler);
+                cfg.compiler(ndk.join("bin").join(compiler));
+            }
+        }
+
+        // The default gcc version from OpenBSD may be too old, try using egcc,
+        // which is a gcc version from ports, if this is the case.
+        t if t.contains("openbsd") => {
+            let c = cfg.get_compiler();
+            if !c.path().ends_with(gnu_compiler) {
+                return
+            }
+
+            let output = output(c.to_command().arg("--version"));
+            let i = match output.find(" 4.") {
+                Some(i) => i,
+                None => return,
+            };
+            match output[i + 3..].chars().next().unwrap() {
+                '0' ... '6' => {}
+                _ => return,
+            }
+            let alternative = format!("e{}", gnu_compiler);
+            if Command::new(&alternative).output().is_ok() {
+                cfg.compiler(alternative);
+            }
+        }
+
+        _ => {}
+    }
+}
diff --git a/src/bootstrap/channel.rs b/src/bootstrap/channel.rs
new file mode 100644 (file)
index 0000000..879c383
--- /dev/null
@@ -0,0 +1,110 @@
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Build configuration for Rust's release channels.
+//!
+//! Implements the stable/beta/nightly channel distinctions by setting various
+//! flags like the `unstable_features`, calculating variables like `release` and
+//! `package_vers`, and otherwise indicating to the compiler what it should
+//! print out as part of its version information.
+
+use std::fs::{self, File};
+use std::io::prelude::*;
+use std::process::Command;
+
+use build_helper::output;
+use md5;
+
+use Build;
+
+pub fn collect(build: &mut Build) {
+    // Currently the canonical source for the release number (e.g. 1.10.0) and
+    // the prerelease version (e.g. `.1`) is in `mk/main.mk`. We "parse" that
+    // here to learn about those numbers.
+    let mut main_mk = String::new();
+    t!(t!(File::open(build.src.join("mk/main.mk"))).read_to_string(&mut main_mk));
+    let mut release_num = "";
+    let mut prerelease_version = "";
+    for line in main_mk.lines() {
+        if line.starts_with("CFG_RELEASE_NUM") {
+            release_num = line.split('=').skip(1).next().unwrap().trim();
+        }
+        if line.starts_with("CFG_PRERELEASE_VERSION") {
+            prerelease_version = line.split('=').skip(1).next().unwrap().trim();
+        }
+    }
+
+    // Depending on the channel, passed in `./configure --release-channel`,
+    // determine various properties of the build.
+    match &build.config.channel[..] {
+        "stable" => {
+            build.release = release_num.to_string();
+            build.package_vers = build.release.clone();
+            build.unstable_features = false;
+        }
+        "beta" => {
+            build.release = format!("{}-beta{}", release_num,
+                                   prerelease_version);
+            build.package_vers = "beta".to_string();
+            build.unstable_features = false;
+        }
+        "nightly" => {
+            build.release = format!("{}-nightly", release_num);
+            build.package_vers = "nightly".to_string();
+            build.unstable_features = true;
+        }
+        _ => {
+            build.release = format!("{}-dev", release_num);
+            build.package_vers = build.release.clone();
+            build.unstable_features = true;
+        }
+    }
+    build.version = build.release.clone();
+
+    // If we have a git directory, add in some various SHA information of what
+    // commit this compiler was compiled from.
+    if fs::metadata(build.src.join(".git")).is_ok() {
+        let ver_date = output(Command::new("git").current_dir(&build.src)
+                                      .arg("log").arg("-1")
+                                      .arg("--date=short")
+                                      .arg("--pretty=format:%cd"));
+        let ver_hash = output(Command::new("git").current_dir(&build.src)
+                                      .arg("rev-parse").arg("HEAD"));
+        let short_ver_hash = output(Command::new("git")
+                                            .current_dir(&build.src)
+                                            .arg("rev-parse")
+                                            .arg("--short=9")
+                                            .arg("HEAD"));
+        let ver_date = ver_date.trim().to_string();
+        let ver_hash = ver_hash.trim().to_string();
+        let short_ver_hash = short_ver_hash.trim().to_string();
+        build.version.push_str(&format!(" ({} {})", short_ver_hash,
+                                       ver_date));
+        build.ver_date = Some(ver_date.to_string());
+        build.ver_hash = Some(ver_hash);
+        build.short_ver_hash = Some(short_ver_hash);
+    }
+
+    // Calculate this compiler's bootstrap key, which is currently defined as
+    // the first 8 characters of the md5 of the release string.
+    let key = md5::compute(build.release.as_bytes());
+    build.bootstrap_key = format!("{:02x}{:02x}{:02x}{:02x}",
+                                  key[0], key[1], key[2], key[3]);
+
+    // Slurp up the stage0 bootstrap key as we're bootstrapping from an
+    // otherwise stable compiler.
+    let mut s = String::new();
+    t!(t!(File::open(build.src.join("src/stage0.txt"))).read_to_string(&mut s));
+    if let Some(line) = s.lines().find(|l| l.starts_with("rustc_key")) {
+        if let Some(key) = line.split(": ").nth(1) {
+            build.bootstrap_key_stage0 = key.to_string();
+        }
+    }
+}
diff --git a/src/bootstrap/check.rs b/src/bootstrap/check.rs
new file mode 100644 (file)
index 0000000..3d8b143
--- /dev/null
@@ -0,0 +1,413 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Implementation of the various `check-*` targets of the build system.
+//!
+//! This file implements the various regression test suites that we execute on
+//! our CI.
+
+use std::env;
+use std::fs::{self, File};
+use std::io::prelude::*;
+use std::path::{PathBuf, Path};
+use std::process::Command;
+
+use build_helper::output;
+
+use {Build, Compiler, Mode};
+use util::{self, dylib_path, dylib_path_var};
+
+const ADB_TEST_DIR: &'static str = "/data/tmp";
+
+/// Runs the `linkchecker` tool as compiled in `stage` by the `host` compiler.
+///
+/// This tool in `src/tools` will verify the validity of all our links in the
+/// documentation to ensure we don't have a bunch of dead ones.
+pub fn linkcheck(build: &Build, stage: u32, host: &str) {
+    println!("Linkcheck stage{} ({})", stage, host);
+    let compiler = Compiler::new(stage, host);
+    build.run(build.tool_cmd(&compiler, "linkchecker")
+                   .arg(build.out.join(host).join("doc")));
+}
+
+/// Runs the `cargotest` tool as compiled in `stage` by the `host` compiler.
+///
+/// This tool in `src/tools` will check out a few Rust projects and run `cargo
+/// test` to ensure that we don't regress the test suites there.
+pub fn cargotest(build: &Build, stage: u32, host: &str) {
+    let ref compiler = Compiler::new(stage, host);
+
+    // Configure PATH to find the right rustc. NB. we have to use PATH
+    // and not RUSTC because the Cargo test suite has tests that will
+    // fail if rustc is not spelled `rustc`.
+    let path = build.sysroot(compiler).join("bin");
+    let old_path = ::std::env::var("PATH").expect("");
+    let sep = if cfg!(windows) { ";" } else {":" };
+    let ref newpath = format!("{}{}{}", path.display(), sep, old_path);
+
+    // Note that this is a short, cryptic, and not scoped directory name. This
+    // is currently to minimize the length of path on Windows where we otherwise
+    // quickly run into path name limit constraints.
+    let out_dir = build.out.join("ct");
+    t!(fs::create_dir_all(&out_dir));
+
+    build.run(build.tool_cmd(compiler, "cargotest")
+                   .env("PATH", newpath)
+                   .arg(&build.cargo)
+                   .arg(&out_dir));
+}
+
+/// Runs the `tidy` tool as compiled in `stage` by the `host` compiler.
+///
+/// This tool in `src/tools` checks up on various bits and pieces of style and
+/// otherwise just implements a few lint-like checks that are specific to the
+/// compiler itself.
+pub fn tidy(build: &Build, stage: u32, host: &str) {
+    println!("tidy check stage{} ({})", stage, host);
+    let compiler = Compiler::new(stage, host);
+    build.run(build.tool_cmd(&compiler, "tidy")
+                   .arg(build.src.join("src")));
+}
+
+fn testdir(build: &Build, host: &str) -> PathBuf {
+    build.out.join(host).join("test")
+}
+
+/// Executes the `compiletest` tool to run a suite of tests.
+///
+/// Compiles all tests with `compiler` for `target` with the specified
+/// compiletest `mode` and `suite` arguments. For example `mode` can be
+/// "run-pass" or `suite` can be something like `debuginfo`.
+pub fn compiletest(build: &Build,
+                   compiler: &Compiler,
+                   target: &str,
+                   mode: &str,
+                   suite: &str) {
+    println!("Check compiletest {} ({} -> {})", suite, compiler.host, target);
+    let mut cmd = build.tool_cmd(compiler, "compiletest");
+
+    // compiletest currently has... a lot of arguments, so let's just pass all
+    // of them!
+
+    cmd.arg("--compile-lib-path").arg(build.rustc_libdir(compiler));
+    cmd.arg("--run-lib-path").arg(build.sysroot_libdir(compiler, target));
+    cmd.arg("--rustc-path").arg(build.compiler_path(compiler));
+    cmd.arg("--rustdoc-path").arg(build.rustdoc(compiler));
+    cmd.arg("--src-base").arg(build.src.join("src/test").join(suite));
+    cmd.arg("--build-base").arg(testdir(build, compiler.host).join(suite));
+    cmd.arg("--stage-id").arg(format!("stage{}-{}", compiler.stage, target));
+    cmd.arg("--mode").arg(mode);
+    cmd.arg("--target").arg(target);
+    cmd.arg("--host").arg(compiler.host);
+    cmd.arg("--llvm-filecheck").arg(build.llvm_filecheck(&build.config.build));
+
+    let mut flags = vec!["-Crpath".to_string()];
+    if build.config.rust_optimize_tests {
+        flags.push("-O".to_string());
+    }
+    if build.config.rust_debuginfo_tests {
+        flags.push("-g".to_string());
+    }
+
+    let mut hostflags = build.rustc_flags(&compiler.host);
+    hostflags.extend(flags.clone());
+    cmd.arg("--host-rustcflags").arg(hostflags.join(" "));
+
+    let mut targetflags = build.rustc_flags(&target);
+    targetflags.extend(flags);
+    targetflags.push(format!("-Lnative={}",
+                             build.test_helpers_out(target).display()));
+    cmd.arg("--target-rustcflags").arg(targetflags.join(" "));
+
+    // FIXME: CFG_PYTHON should probably be detected more robustly elsewhere
+    let python_default = "python";
+    cmd.arg("--docck-python").arg(python_default);
+
+    if build.config.build.ends_with("apple-darwin") {
+        // Force /usr/bin/python on OSX for LLDB tests because we're loading the
+        // LLDB plugin's compiled module which only works with the system python
+        // (namely not Homebrew-installed python)
+        cmd.arg("--lldb-python").arg("/usr/bin/python");
+    } else {
+        cmd.arg("--lldb-python").arg(python_default);
+    }
+
+    if let Some(ref vers) = build.gdb_version {
+        cmd.arg("--gdb-version").arg(vers);
+    }
+    if let Some(ref vers) = build.lldb_version {
+        cmd.arg("--lldb-version").arg(vers);
+    }
+    if let Some(ref dir) = build.lldb_python_dir {
+        cmd.arg("--lldb-python-dir").arg(dir);
+    }
+
+    cmd.args(&build.flags.args);
+
+    if build.config.verbose || build.flags.verbose {
+        cmd.arg("--verbose");
+    }
+
+    // Only pass correct values for these flags for the `run-make` suite as it
+    // requires that a C++ compiler was configured which isn't always the case.
+    if suite == "run-make" {
+        let llvm_config = build.llvm_config(target);
+        let llvm_components = output(Command::new(&llvm_config).arg("--components"));
+        let llvm_cxxflags = output(Command::new(&llvm_config).arg("--cxxflags"));
+        cmd.arg("--cc").arg(build.cc(target))
+           .arg("--cxx").arg(build.cxx(target))
+           .arg("--cflags").arg(build.cflags(target).join(" "))
+           .arg("--llvm-components").arg(llvm_components.trim())
+           .arg("--llvm-cxxflags").arg(llvm_cxxflags.trim());
+    } else {
+        cmd.arg("--cc").arg("")
+           .arg("--cxx").arg("")
+           .arg("--cflags").arg("")
+           .arg("--llvm-components").arg("")
+           .arg("--llvm-cxxflags").arg("");
+    }
+
+    // Running a C compiler on MSVC requires a few env vars to be set, to be
+    // sure to set them here.
+    if target.contains("msvc") {
+        for &(ref k, ref v) in build.cc[target].0.env() {
+            if k != "PATH" {
+                cmd.env(k, v);
+            }
+        }
+    }
+    build.add_bootstrap_key(compiler, &mut cmd);
+
+    cmd.arg("--adb-path").arg("adb");
+    cmd.arg("--adb-test-dir").arg(ADB_TEST_DIR);
+    if target.contains("android") {
+        // Assume that cc for this target comes from the android sysroot
+        cmd.arg("--android-cross-path")
+           .arg(build.cc(target).parent().unwrap().parent().unwrap());
+    } else {
+        cmd.arg("--android-cross-path").arg("");
+    }
+
+    build.run(&mut cmd);
+}
+
+/// Run `rustdoc --test` for all documentation in `src/doc`.
+///
+/// This will run all tests in our markdown documentation (e.g. the book)
+/// located in `src/doc`. The `rustdoc` that's run is the one that sits next to
+/// `compiler`.
+pub fn docs(build: &Build, compiler: &Compiler) {
+    // Do a breadth-first traversal of the `src/doc` directory and just run
+    // tests for all files that end in `*.md`
+    let mut stack = vec![build.src.join("src/doc")];
+
+    while let Some(p) = stack.pop() {
+        if p.is_dir() {
+            stack.extend(t!(p.read_dir()).map(|p| t!(p).path()));
+            continue
+        }
+
+        if p.extension().and_then(|s| s.to_str()) != Some("md") {
+            continue
+        }
+
+        println!("doc tests for: {}", p.display());
+        markdown_test(build, compiler, &p);
+    }
+}
+
+/// Run the error index generator tool to execute the tests located in the error
+/// index.
+///
+/// The `error_index_generator` tool lives in `src/tools` and is used to
+/// generate a markdown file from the error indexes of the code base which is
+/// then passed to `rustdoc --test`.
+pub fn error_index(build: &Build, compiler: &Compiler) {
+    println!("Testing error-index stage{}", compiler.stage);
+
+    let output = testdir(build, compiler.host).join("error-index.md");
+    build.run(build.tool_cmd(compiler, "error_index_generator")
+                   .arg("markdown")
+                   .arg(&output)
+                   .env("CFG_BUILD", &build.config.build));
+
+    markdown_test(build, compiler, &output);
+}
+
+fn markdown_test(build: &Build, compiler: &Compiler, markdown: &Path) {
+    let mut cmd = Command::new(build.rustdoc(compiler));
+    build.add_rustc_lib_path(compiler, &mut cmd);
+    cmd.arg("--test");
+    cmd.arg(markdown);
+    cmd.arg("--test-args").arg(build.flags.args.join(" "));
+    build.run(&mut cmd);
+}
+
+/// Run all unit tests plus documentation tests for an entire crate DAG defined
+/// by a `Cargo.toml`
+///
+/// This is what runs tests for crates like the standard library, compiler, etc.
+/// It essentially is the driver for running `cargo test`.
+///
+/// Currently this runs all tests for a DAG by passing a bunch of `-p foo`
+/// arguments, and those arguments are discovered from `Cargo.lock`.
+pub fn krate(build: &Build,
+             compiler: &Compiler,
+             target: &str,
+             mode: Mode) {
+    let (name, path, features) = match mode {
+        Mode::Libstd => ("libstd", "src/rustc/std_shim", build.std_features()),
+        Mode::Libtest => ("libtest", "src/rustc/test_shim", String::new()),
+        Mode::Librustc => ("librustc", "src/rustc", build.rustc_features()),
+        _ => panic!("can only test libraries"),
+    };
+    println!("Testing {} stage{} ({} -> {})", name, compiler.stage,
+             compiler.host, target);
+
+    // Build up the base `cargo test` command.
+    let mut cargo = build.cargo(compiler, mode, target, "test");
+    cargo.arg("--manifest-path")
+         .arg(build.src.join(path).join("Cargo.toml"))
+         .arg("--features").arg(features);
+
+    // Generate a list of `-p` arguments to pass to the `cargo test` invocation
+    // by crawling the corresponding Cargo.lock file.
+    let lockfile = build.src.join(path).join("Cargo.lock");
+    let mut contents = String::new();
+    t!(t!(File::open(&lockfile)).read_to_string(&mut contents));
+    let mut lines = contents.lines();
+    while let Some(line) = lines.next() {
+        let prefix = "name = \"";
+        if !line.starts_with(prefix) {
+            continue
+        }
+        lines.next(); // skip `version = ...`
+
+        // skip crates.io or otherwise non-path crates
+        if let Some(line) = lines.next() {
+            if line.starts_with("source") {
+                continue
+            }
+        }
+
+        let crate_name = &line[prefix.len()..line.len() - 1];
+
+        // Right now jemalloc is our only target-specific crate in the sense
+        // that it's not present on all platforms. Custom skip it here for now,
+        // but if we add more this probably wants to get more generalized.
+        if crate_name.contains("jemalloc") {
+            continue
+        }
+
+        cargo.arg("-p").arg(crate_name);
+    }
+
+    // The tests are going to run with the *target* libraries, so we need to
+    // ensure that those libraries show up in the LD_LIBRARY_PATH equivalent.
+    //
+    // Note that to run the compiler we need to run with the *host* libraries,
+    // but our wrapper scripts arrange for that to be the case anyway.
+    let mut dylib_path = dylib_path();
+    dylib_path.insert(0, build.sysroot_libdir(compiler, target));
+    cargo.env(dylib_path_var(), env::join_paths(&dylib_path).unwrap());
+
+    if target.contains("android") {
+        build.run(cargo.arg("--no-run"));
+        krate_android(build, compiler, target, mode);
+    } else {
+        cargo.args(&build.flags.args);
+        build.run(&mut cargo);
+    }
+}
+
+fn krate_android(build: &Build,
+                 compiler: &Compiler,
+                 target: &str,
+                 mode: Mode) {
+    let mut tests = Vec::new();
+    let out_dir = build.cargo_out(compiler, mode, target);
+    find_tests(&out_dir, target, &mut tests);
+    find_tests(&out_dir.join("deps"), target, &mut tests);
+
+    for test in tests {
+        build.run(Command::new("adb").arg("push").arg(&test).arg(ADB_TEST_DIR));
+
+        let test_file_name = test.file_name().unwrap().to_string_lossy();
+        let log = format!("{}/check-stage{}-T-{}-H-{}-{}.log",
+                          ADB_TEST_DIR,
+                          compiler.stage,
+                          target,
+                          compiler.host,
+                          test_file_name);
+        let program = format!("(cd {dir}; \
+                                LD_LIBRARY_PATH=./{target} ./{test} \
+                                    --logfile {log} \
+                                    {args})",
+                              dir = ADB_TEST_DIR,
+                              target = target,
+                              test = test_file_name,
+                              log = log,
+                              args = build.flags.args.join(" "));
+
+        let output = output(Command::new("adb").arg("shell").arg(&program));
+        println!("{}", output);
+        build.run(Command::new("adb")
+                          .arg("pull")
+                          .arg(&log)
+                          .arg(build.out.join("tmp")));
+        build.run(Command::new("adb").arg("shell").arg("rm").arg(&log));
+        if !output.contains("result: ok") {
+            panic!("some tests failed");
+        }
+    }
+}
+
+fn find_tests(dir: &Path,
+              target: &str,
+              dst: &mut Vec<PathBuf>) {
+    for e in t!(dir.read_dir()).map(|e| t!(e)) {
+        let file_type = t!(e.file_type());
+        if !file_type.is_file() {
+            continue
+        }
+        let filename = e.file_name().into_string().unwrap();
+        if (target.contains("windows") && filename.ends_with(".exe")) ||
+           (!target.contains("windows") && !filename.contains(".")) {
+            dst.push(e.path());
+        }
+    }
+}
+
+pub fn android_copy_libs(build: &Build,
+                         compiler: &Compiler,
+                         target: &str) {
+    println!("Android copy libs to emulator ({})", target);
+    build.run(Command::new("adb").arg("remount"));
+    build.run(Command::new("adb").args(&["shell", "rm", "-r", ADB_TEST_DIR]));
+    build.run(Command::new("adb").args(&["shell", "mkdir", ADB_TEST_DIR]));
+    build.run(Command::new("adb")
+                      .arg("push")
+                      .arg(build.src.join("src/etc/adb_run_wrapper.sh"))
+                      .arg(ADB_TEST_DIR));
+
+    let target_dir = format!("{}/{}", ADB_TEST_DIR, target);
+    build.run(Command::new("adb").args(&["shell", "mkdir", &target_dir[..]]));
+
+    for f in t!(build.sysroot_libdir(compiler, target).read_dir()) {
+        let f = t!(f);
+        let name = f.file_name().into_string().unwrap();
+        if util::is_dylib(&name) {
+            build.run(Command::new("adb")
+                              .arg("push")
+                              .arg(f.path())
+                              .arg(&target_dir));
+        }
+    }
+}
diff --git a/src/bootstrap/clean.rs b/src/bootstrap/clean.rs
new file mode 100644 (file)
index 0000000..a466e2e
--- /dev/null
@@ -0,0 +1,49 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Implementation of `make clean` in rustbuild.
+//!
+//! Responsible for cleaning out a build directory of all old and stale
+//! artifacts to prepare for a fresh build. Currently doesn't remove the
+//! `build/cache` directory (download cache) or the `build/$target/llvm`
+//! directory as we want that cached between builds.
+
+use std::fs;
+use std::path::Path;
+
+use Build;
+
+/// Entry point for `make clean`: deletes build artifacts for every
+/// configured host while keeping, per the module docs above, the download
+/// cache and the per-target LLVM build directories.
+pub fn clean(build: &Build) {
+    rm_rf(build, "tmp".as_ref());
+    rm_rf(build, &build.out.join("tmp"));
+
+    for host in build.config.host.iter() {
+
+        let out = build.out.join(host);
+
+        rm_rf(build, &out.join("compiler-rt"));
+        rm_rf(build, &out.join("doc"));
+
+        // Stages 0 through 3: the assembled sysroots plus the per-crate
+        // cargo output directories (std/rustc/tools/test) for each stage.
+        for stage in 0..4 {
+            rm_rf(build, &out.join(format!("stage{}", stage)));
+            rm_rf(build, &out.join(format!("stage{}-std", stage)));
+            rm_rf(build, &out.join(format!("stage{}-rustc", stage)));
+            rm_rf(build, &out.join(format!("stage{}-tools", stage)));
+            rm_rf(build, &out.join(format!("stage{}-test", stage)));
+        }
+    }
+}
+
+/// Recursively delete `path` if it exists; a no-op otherwise.
+/// Panics (via `t!`) if removal fails partway through.
+fn rm_rf(build: &Build, path: &Path) {
+    if path.exists() {
+        build.verbose(&format!("removing `{}`", path.display()));
+        t!(fs::remove_dir_all(path));
+    }
+}
diff --git a/src/bootstrap/compile.rs b/src/bootstrap/compile.rs
new file mode 100644 (file)
index 0000000..8ec9c7f
--- /dev/null
@@ -0,0 +1,360 @@
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Implementation of compiling various phases of the compiler and standard
+//! library.
+//!
+//! This module contains some of the real meat in the rustbuild build system
+//! which is where Cargo is used to compiler the standard library, libtest, and
+//! compiler. This module is also responsible for assembling the sysroot as it
+//! goes along from the output of the previous stage.
+
+use std::collections::HashMap;
+use std::fs;
+use std::path::{Path, PathBuf};
+use std::process::Command;
+
+use build_helper::output;
+
+use util::{exe, staticlib, libdir, mtime, is_dylib, copy};
+use {Build, Compiler, Mode};
+
+/// Build the standard library.
+///
+/// This will build the standard library for a particular stage of the build
+/// using the `compiler` targeting the `target` architecture. The artifacts
+/// created will also be linked into the sysroot directory.
+pub fn std<'a>(build: &'a Build, target: &str, compiler: &Compiler<'a>) {
+    println!("Building stage{} std artifacts ({} -> {})", compiler.stage,
+             compiler.host, target);
+
+    // Move compiler-rt into place as it'll be required by the compiler when
+    // building the standard library to link the dylib of libstd
+    let libdir = build.sysroot_libdir(compiler, target);
+    let _ = fs::remove_dir_all(&libdir);
+    t!(fs::create_dir_all(&libdir));
+    copy(&build.compiler_rt_built.borrow()[target],
+         &libdir.join(staticlib("compiler-rt", target)));
+
+    // Some platforms have startup objects that may be required to produce the
+    // libstd dynamic library, for example.
+    build_startup_objects(build, target, &libdir);
+
+    let out_dir = build.cargo_out(compiler, Mode::Libstd, target);
+    // Force a rebuild if the compiler producing these artifacts has changed
+    // since the last build (the compiler binary is the dirty-marker).
+    build.clear_if_dirty(&out_dir, &build.compiler_path(compiler));
+    let mut cargo = build.cargo(compiler, Mode::Libstd, target, "build");
+    cargo.arg("--features").arg(build.std_features())
+         .arg("--manifest-path")
+         .arg(build.src.join("src/rustc/std_shim/Cargo.toml"));
+
+    // Per-target override: point the jemalloc build script at a prebuilt
+    // jemalloc if one was configured for this target.
+    if let Some(target) = build.config.target_config.get(target) {
+        if let Some(ref jemalloc) = target.jemalloc {
+            cargo.env("JEMALLOC_OVERRIDE", jemalloc);
+        }
+    }
+    // musl targets need to know where the musl sysroot lives.
+    if let Some(ref p) = build.config.musl_root {
+        if target.contains("musl") {
+            cargo.env("MUSL_ROOT", p);
+        }
+    }
+
+    build.run(&mut cargo);
+    // Immediately link the freshly built artifacts into this host's sysroot.
+    std_link(build, target, compiler, compiler.host);
+}
+
+/// Link all libstd rlibs/dylibs into the sysroot location.
+///
+/// Links those artifacts generated in the given `stage` for `target` produced
+/// by `compiler` into `host`'s sysroot.
+pub fn std_link(build: &Build,
+                target: &str,
+                compiler: &Compiler,
+                host: &str) {
+    let target_compiler = Compiler::new(compiler.stage, host);
+    let libdir = build.sysroot_libdir(&target_compiler, target);
+    let out_dir = build.cargo_out(compiler, Mode::Libstd, target);
+
+    // If we're linking one compiler host's output into another, then we weren't
+    // called from the `std` method above. In that case we clean out what's
+    // already there and then also link compiler-rt into place.
+    if host != compiler.host {
+        let _ = fs::remove_dir_all(&libdir);
+        t!(fs::create_dir_all(&libdir));
+        copy(&build.compiler_rt_built.borrow()[target],
+             &libdir.join(staticlib("compiler-rt", target)));
+    }
+    add_to_sysroot(&out_dir, &libdir);
+
+    // x86 musl targets statically link libc, so they additionally need the
+    // C runtime startup objects (crt*.o) placed in the sysroot.
+    if target.contains("musl") &&
+       (target.contains("x86_64") || target.contains("i686")) {
+        copy_third_party_objects(build, target, &libdir);
+    }
+}
+
+/// Copies the crt(1,i,n).o startup objects
+///
+/// Only required for musl targets that statically link to libc
+fn copy_third_party_objects(build: &Build, target: &str, into: &Path) {
+    for &obj in &["crt1.o", "crti.o", "crtn.o"] {
+        // Ask the target's C compiler where each object lives, then copy it.
+        copy(&compiler_file(build.cc(target), obj), &into.join(obj));
+    }
+}
+
+/// Build and prepare startup objects like rsbegin.o and rsend.o
+///
+/// These are primarily used on Windows right now for linking executables/dlls.
+/// They don't require any library support as they're just plain old object
+/// files, so we just use the nightly snapshot compiler to always build them (as
+/// no other compilers are guaranteed to be available).
+fn build_startup_objects(build: &Build, target: &str, into: &Path) {
+    if !target.contains("pc-windows-gnu") {
+        return
+    }
+    // The stage0 (snapshot) compiler for the build triple is always present.
+    let compiler = Compiler::new(0, &build.config.build);
+    let compiler = build.compiler_path(&compiler);
+
+    // Compile each source file in src/rtstartup straight to an object file
+    // in the destination libdir.
+    for file in t!(fs::read_dir(build.src.join("src/rtstartup"))) {
+        let file = t!(file);
+        build.run(Command::new(&compiler)
+                          .arg("--emit=obj")
+                          .arg("--out-dir").arg(into)
+                          .arg(file.path()));
+    }
+
+    // Also grab the C runtime's own startup objects from the C compiler.
+    for obj in ["crt2.o", "dllcrt2.o"].iter() {
+        copy(&compiler_file(build.cc(target), obj), &into.join(obj));
+    }
+}
+
+/// Build libtest.
+///
+/// This will build libtest and supporting libraries for a particular stage of
+/// the build using the `compiler` targeting the `target` architecture. The
+/// artifacts created will also be linked into the sysroot directory.
+pub fn test<'a>(build: &'a Build, target: &str, compiler: &Compiler<'a>) {
+    println!("Building stage{} test artifacts ({} -> {})", compiler.stage,
+             compiler.host, target);
+    let out_dir = build.cargo_out(compiler, Mode::Libtest, target);
+    // Rebuild from scratch if libstd was rebuilt more recently than our
+    // output; the libstd shim rlib serves as the dirty-marker.
+    build.clear_if_dirty(&out_dir, &libstd_shim(build, compiler, target));
+    let mut cargo = build.cargo(compiler, Mode::Libtest, target, "build");
+    cargo.arg("--manifest-path")
+         .arg(build.src.join("src/rustc/test_shim/Cargo.toml"));
+    build.run(&mut cargo);
+    test_link(build, target, compiler, compiler.host);
+}
+
+/// Link all libtest rlibs/dylibs into the sysroot location.
+///
+/// Links those artifacts generated in the given `stage` for `target` produced
+/// by `compiler` into `host`'s sysroot.
+pub fn test_link(build: &Build,
+                 target: &str,
+                 compiler: &Compiler,
+                 host: &str) {
+    let target_compiler = Compiler::new(compiler.stage, host);
+    let libdir = build.sysroot_libdir(&target_compiler, target);
+    let out_dir = build.cargo_out(compiler, Mode::Libtest, target);
+    add_to_sysroot(&out_dir, &libdir);
+}
+
+/// Build the compiler.
+///
+/// This will build the compiler for a particular stage of the build using
+/// the `compiler` targeting the `target` architecture. The artifacts
+/// created will also be linked into the sysroot directory.
+pub fn rustc<'a>(build: &'a Build, target: &str, compiler: &Compiler<'a>) {
+    println!("Building stage{} compiler artifacts ({} -> {})",
+             compiler.stage, compiler.host, target);
+
+    let out_dir = build.cargo_out(compiler, Mode::Librustc, target);
+    // Rebuild if libtest changed underneath us; the libtest shim rlib is
+    // the dirty-marker here.
+    build.clear_if_dirty(&out_dir, &libtest_shim(build, compiler, target));
+
+    let mut cargo = build.cargo(compiler, Mode::Librustc, target, "build");
+    cargo.arg("--features").arg(build.rustc_features())
+         .arg("--manifest-path")
+         .arg(build.src.join("src/rustc/Cargo.toml"));
+
+    // Set some configuration variables picked up by build scripts and
+    // the compiler alike
+    cargo.env("CFG_RELEASE", &build.release)
+         .env("CFG_RELEASE_CHANNEL", &build.config.channel)
+         .env("CFG_VERSION", &build.version)
+         .env("CFG_BOOTSTRAP_KEY", &build.bootstrap_key)
+         .env("CFG_PREFIX", build.config.prefix.clone().unwrap_or(String::new()))
+         .env("CFG_LIBDIR_RELATIVE", "lib");
+
+    // Version date/hash are optional (e.g. when building outside a git
+    // checkout), so only export them when known.
+    if let Some(ref ver_date) = build.ver_date {
+        cargo.env("CFG_VER_DATE", ver_date);
+    }
+    if let Some(ref ver_hash) = build.ver_hash {
+        cargo.env("CFG_VER_HASH", ver_hash);
+    }
+    if !build.unstable_features {
+        cargo.env("CFG_DISABLE_UNSTABLE_FEATURES", "1");
+    }
+    // Tell rustc's build scripts which LLVM to link against.
+    cargo.env("LLVM_CONFIG", build.llvm_config(target));
+    if build.config.llvm_static_stdcpp {
+        cargo.env("LLVM_STATIC_STDCPP",
+                  compiler_file(build.cxx(target), "libstdc++.a"));
+    }
+    if let Some(ref s) = build.config.rustc_default_linker {
+        cargo.env("CFG_DEFAULT_LINKER", s);
+    }
+    if let Some(ref s) = build.config.rustc_default_ar {
+        cargo.env("CFG_DEFAULT_AR", s);
+    }
+    build.run(&mut cargo);
+
+    rustc_link(build, target, compiler, compiler.host);
+}
+
+/// Link all librustc rlibs/dylibs into the sysroot location.
+///
+/// Links those artifacts generated in the given `stage` for `target` produced
+/// by `compiler` into `host`'s sysroot.
+pub fn rustc_link(build: &Build,
+                  target: &str,
+                  compiler: &Compiler,
+                  host: &str) {
+    let target_compiler = Compiler::new(compiler.stage, host);
+    let libdir = build.sysroot_libdir(&target_compiler, target);
+    let out_dir = build.cargo_out(compiler, Mode::Librustc, target);
+    add_to_sysroot(&out_dir, &libdir);
+}
+
+/// Cargo's output path for the standard library in a given stage, compiled
+/// by a particular compiler for the specified target.
+///
+/// The mtime of this rlib doubles as the dirty-marker consulted via
+/// `clear_if_dirty` before building libtest.
+fn libstd_shim(build: &Build, compiler: &Compiler, target: &str) -> PathBuf {
+    build.cargo_out(compiler, Mode::Libstd, target).join("libstd_shim.rlib")
+}
+
+/// Cargo's output path for libtest in a given stage, compiled by a particular
+/// compiler for the specified target.
+///
+/// Likewise used as the dirty-marker before building librustc.
+fn libtest_shim(build: &Build, compiler: &Compiler, target: &str) -> PathBuf {
+    build.cargo_out(compiler, Mode::Libtest, target).join("libtest_shim.rlib")
+}
+
+/// Ask the C/C++ `compiler` for the on-disk location of `file` via its
+/// `-print-file-name` flag and return that path.
+fn compiler_file(compiler: &Path, file: &str) -> PathBuf {
+    let out = output(Command::new(compiler)
+                            .arg(format!("-print-file-name={}", file)));
+    PathBuf::from(out.trim())
+}
+
+/// Prepare a new compiler from the artifacts in `stage`
+///
+/// This will assemble a compiler in `build/$host/stage$stage`. The compiler
+/// must have been previously produced by the `stage - 1` build.config.build
+/// compiler.
+///
+/// Note: despite the "link" wording below, artifacts are copied into the
+/// sysroot, not symlinked.
+pub fn assemble_rustc(build: &Build, stage: u32, host: &str) {
+    assert!(stage > 0, "the stage0 compiler isn't assembled, it's downloaded");
+    // The compiler that we're assembling
+    let target_compiler = Compiler::new(stage, host);
+
+    // The compiler that compiled the compiler we're assembling
+    let build_compiler = Compiler::new(stage - 1, &build.config.build);
+
+    // Clear out old files
+    let sysroot = build.sysroot(&target_compiler);
+    let _ = fs::remove_dir_all(&sysroot);
+    t!(fs::create_dir_all(&sysroot));
+
+    // Link in all dylibs to the libdir
+    let sysroot_libdir = sysroot.join(libdir(host));
+    t!(fs::create_dir_all(&sysroot_libdir));
+    let src_libdir = build.sysroot_libdir(&build_compiler, host);
+    for f in t!(fs::read_dir(&src_libdir)).map(|f| t!(f)) {
+        let filename = f.file_name().into_string().unwrap();
+        if is_dylib(&filename) {
+            copy(&f.path(), &sysroot_libdir.join(&filename));
+        }
+    }
+
+    let out_dir = build.cargo_out(&build_compiler, Mode::Librustc, host);
+
+    // Link the compiler binary itself into place
+    let rustc = out_dir.join(exe("rustc", host));
+    let bindir = sysroot.join("bin");
+    t!(fs::create_dir_all(&bindir));
+    let compiler = build.compiler_path(&Compiler::new(stage, host));
+    let _ = fs::remove_file(&compiler);
+    copy(&rustc, &compiler);
+
+    // See if rustdoc exists to link it into place; rustdoc is optional so
+    // its absence is not an error.
+    let rustdoc = exe("rustdoc", host);
+    let rustdoc_src = out_dir.join(&rustdoc);
+    let rustdoc_dst = bindir.join(&rustdoc);
+    if fs::metadata(&rustdoc_src).is_ok() {
+        let _ = fs::remove_file(&rustdoc_dst);
+        copy(&rustdoc_src, &rustdoc_dst);
+    }
+}
+
+/// Link some files into a rustc sysroot.
+///
+/// For a particular stage this will link all of the contents of `out_dir`
+/// into the sysroot of the `host` compiler, assuming the artifacts are
+/// compiled for the specified `target`.
+fn add_to_sysroot(out_dir: &Path, sysroot_dst: &Path) {
+    // Collect the set of all files in the dependencies directory, keyed
+    // off the name of the library. We assume everything is of the form
+    // `foo-<hash>.{rlib,so,...}`, and there could be multiple different
+    // `<hash>` values for the same name (of old builds).
+    let mut map = HashMap::new();
+    for file in t!(fs::read_dir(out_dir.join("deps"))).map(|f| t!(f)) {
+        let filename = file.file_name().into_string().unwrap();
+
+        // We're only interested in linking rlibs + dylibs, other things like
+        // unit tests don't get linked in
+        // (`.lib` is presumably for Windows static/import libs — verify.)
+        if !filename.ends_with(".rlib") &&
+           !filename.ends_with(".lib") &&
+           !is_dylib(&filename) {
+            continue
+        }
+        let file = file.path();
+        // Key on (crate name, extension) so e.g. an rlib and a dylib of the
+        // same crate are tracked independently.
+        let dash = filename.find("-").unwrap();
+        let key = (filename[..dash].to_string(),
+                   file.extension().unwrap().to_owned());
+        map.entry(key).or_insert(Vec::new())
+           .push(file.clone());
+    }
+
+    // For all hash values found, pick the most recent one to move into the
+    // sysroot, that should be the one we just built.
+    for (_, paths) in map {
+        let (_, path) = paths.iter().map(|path| {
+            (mtime(&path).seconds(), path)
+        }).max().unwrap();
+        copy(&path, &sysroot_dst.join(path.file_name().unwrap()));
+    }
+}
+
+/// Build a tool in `src/tools`
+///
+/// This will build the specified tool with the specified `host` compiler in
+/// `stage` into the normal cargo output directory. The tool's artifacts are
+/// not linked into any sysroot; they stay in cargo's output directory.
+pub fn tool(build: &Build, stage: u32, host: &str, tool: &str) {
+    println!("Building stage{} tool {} ({})", stage, tool, host);
+
+    let compiler = Compiler::new(stage, host);
+
+    // FIXME: need to clear out previous tool and ideally deps, may require
+    //        isolating output directories or require a pseudo shim step to
+    //        clear out all the info.
+    //
+    //        Maybe when libstd is compiled it should clear out the rustc of the
+    //        corresponding stage?
+    // let out_dir = build.cargo_out(stage, &host, Mode::Librustc, target);
+    // build.clear_if_dirty(&out_dir, &libstd_shim(build, stage, &host, target));
+
+    let mut cargo = build.cargo(&compiler, Mode::Tool, host, "build");
+    cargo.arg("--manifest-path")
+         .arg(build.src.join(format!("src/tools/{}/Cargo.toml", tool)));
+    build.run(&mut cargo);
+}
diff --git a/src/bootstrap/config.rs b/src/bootstrap/config.rs
new file mode 100644 (file)
index 0000000..498196e
--- /dev/null
@@ -0,0 +1,396 @@
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Serialized configuration of a build.
+//!
+//! This module implements parsing `config.mk` and `config.toml` configuration
+//! files to tweak how the build runs.
+
+use std::collections::HashMap;
+use std::env;
+use std::fs::File;
+use std::io::prelude::*;
+use std::path::PathBuf;
+use std::process;
+
+use num_cpus;
+use rustc_serialize::Decodable;
+use toml::{Parser, Decoder, Value};
+
+/// Global configuration for the entire build and/or bootstrap.
+///
+/// This structure is derived from a combination of both `config.toml` and
+/// `config.mk`. As of the time of this writing it's unlikely that `config.toml`
+/// is used all that much, so this is primarily filled out by `config.mk` which
+/// is generated from `./configure`.
+///
+/// Note that this structure is not decoded directly into, but rather it is
+/// filled out from the decoded forms of the structs below. For documentation
+/// each field, see the corresponding fields in
+/// `src/bootstrap/config.toml.example`.
+#[derive(Default)]
+pub struct Config {
+    pub ccache: bool,
+    pub ninja: bool,
+    pub verbose: bool,
+    pub submodules: bool,
+    pub compiler_docs: bool,
+    pub docs: bool,
+    // Per-target overrides, keyed by target triple.
+    pub target_config: HashMap<String, Target>,
+
+    // llvm codegen options
+    pub llvm_assertions: bool,
+    pub llvm_optimize: bool,
+    pub llvm_version_check: bool,
+    pub llvm_static_stdcpp: bool,
+
+    // rust codegen options
+    pub rust_optimize: bool,
+    pub rust_codegen_units: u32,
+    pub rust_debug_assertions: bool,
+    pub rust_debuginfo: bool,
+    pub rust_rpath: bool,
+    pub rustc_default_linker: Option<String>,
+    pub rustc_default_ar: Option<String>,
+    pub rust_optimize_tests: bool,
+    pub rust_debuginfo_tests: bool,
+
+    pub build: String,
+    pub host: Vec<String>,
+    pub target: Vec<String>,
+    // External stage0 rustc/cargo to bootstrap with, if configured.
+    pub rustc: Option<PathBuf>,
+    pub cargo: Option<PathBuf>,
+    pub local_rebuild: bool,
+
+    // libstd features
+    pub debug_jemalloc: bool,
+    pub use_jemalloc: bool,
+
+    // misc
+    pub channel: String,
+    pub musl_root: Option<PathBuf>,
+    pub prefix: Option<String>,
+}
+
+/// Per-target configuration stored in the global configuration structure.
+#[derive(Default)]
+pub struct Target {
+    // Path to a prebuilt `llvm-config` binary to use instead of building LLVM.
+    pub llvm_config: Option<PathBuf>,
+    // Path to a prebuilt jemalloc to link against.
+    pub jemalloc: Option<PathBuf>,
+    pub cc: Option<PathBuf>,
+    pub cxx: Option<PathBuf>,
+    // Android NDK root for Android targets.
+    pub ndk: Option<PathBuf>,
+}
+
+/// Structure of the `config.toml` file that configuration is read from.
+///
+/// This structure uses `Decodable` to automatically decode a TOML configuration
+/// file into this format, and then this is traversed and written into the above
+/// `Config` structure.
+///
+/// Fields here mirror `Config` but are wrapped in `Option` so that keys
+/// absent from the TOML file fall back to the defaults set in
+/// `Config::parse` (applied via the `set` helper below).
+#[derive(RustcDecodable, Default)]
+struct TomlConfig {
+    build: Option<Build>,
+    llvm: Option<Llvm>,
+    rust: Option<Rust>,
+    target: Option<HashMap<String, TomlTarget>>,
+}
+
+/// TOML representation of various global build decisions.
+#[derive(RustcDecodable, Default, Clone)]
+struct Build {
+    build: Option<String>,
+    host: Vec<String>,
+    target: Vec<String>,
+    cargo: Option<String>,
+    rustc: Option<String>,
+    compiler_docs: Option<bool>,
+    docs: Option<bool>,
+}
+
+/// TOML representation of how the LLVM build is configured.
+#[derive(RustcDecodable, Default)]
+struct Llvm {
+    ccache: Option<bool>,
+    ninja: Option<bool>,
+    assertions: Option<bool>,
+    optimize: Option<bool>,
+    version_check: Option<bool>,
+    static_libstdcpp: Option<bool>,
+}
+
+/// TOML representation of how the Rust build is configured.
+#[derive(RustcDecodable, Default)]
+struct Rust {
+    optimize: Option<bool>,
+    codegen_units: Option<u32>,
+    debug_assertions: Option<bool>,
+    debuginfo: Option<bool>,
+    debug_jemalloc: Option<bool>,
+    use_jemalloc: Option<bool>,
+    default_linker: Option<String>,
+    default_ar: Option<String>,
+    channel: Option<String>,
+    musl_root: Option<String>,
+    rpath: Option<bool>,
+    optimize_tests: Option<bool>,
+    debuginfo_tests: Option<bool>,
+}
+
+/// TOML representation of how each build target is configured.
+#[derive(RustcDecodable, Default)]
+struct TomlTarget {
+    llvm_config: Option<String>,
+    jemalloc: Option<String>,
+    cc: Option<String>,
+    cxx: Option<String>,
+    android_ndk: Option<String>,
+}
+
+impl Config {
+    /// Build a `Config` for the given `build` triple, layering the optional
+    /// `config.toml` at `file` over the built-in defaults. Aborts the
+    /// process (exit code 2) on a TOML parse or decode failure.
+    pub fn parse(build: &str, file: Option<PathBuf>) -> Config {
+        let mut config = Config::default();
+        // Defaults that differ from `Default::default()`, i.e. options that
+        // are "on" unless explicitly disabled.
+        config.llvm_optimize = true;
+        config.use_jemalloc = true;
+        config.rust_optimize = true;
+        config.rust_optimize_tests = true;
+        config.submodules = true;
+        config.docs = true;
+        config.rust_rpath = true;
+        config.rust_codegen_units = 1;
+        config.build = build.to_string();
+        config.channel = "dev".to_string();
+
+        // Read and decode config.toml if one was supplied; otherwise use an
+        // all-`None` TomlConfig so every default above survives.
+        let toml = file.map(|file| {
+            let mut f = t!(File::open(&file));
+            let mut toml = String::new();
+            t!(f.read_to_string(&mut toml));
+            let mut p = Parser::new(&toml);
+            let table = match p.parse() {
+                Some(table) => table,
+                None => {
+                    println!("failed to parse TOML configuration:");
+                    for err in p.errors.iter() {
+                        let (loline, locol) = p.to_linecol(err.lo);
+                        let (hiline, hicol) = p.to_linecol(err.hi);
+                        println!("{}:{}-{}:{}: {}", loline, locol, hiline,
+                                 hicol, err.desc);
+                    }
+                    process::exit(2);
+                }
+            };
+            let mut d = Decoder::new(Value::Table(table));
+            match Decodable::decode(&mut d) {
+                Ok(cfg) => cfg,
+                Err(e) => {
+                    println!("failed to decode TOML: {}", e);
+                    process::exit(2);
+                }
+            }
+        }).unwrap_or_else(|| TomlConfig::default());
+
+        // The build triple is always the first host; extra hosts and targets
+        // are appended with de-duplication, and every host is also a target.
+        let build = toml.build.clone().unwrap_or(Build::default());
+        set(&mut config.build, build.build.clone());
+        config.host.push(config.build.clone());
+        for host in build.host.iter() {
+            if !config.host.contains(host) {
+                config.host.push(host.clone());
+            }
+        }
+        for target in config.host.iter().chain(&build.target) {
+            if !config.target.contains(target) {
+                config.target.push(target.clone());
+            }
+        }
+        config.rustc = build.rustc.map(PathBuf::from);
+        config.cargo = build.cargo.map(PathBuf::from);
+        set(&mut config.compiler_docs, build.compiler_docs);
+        set(&mut config.docs, build.docs);
+
+        if let Some(ref llvm) = toml.llvm {
+            set(&mut config.ccache, llvm.ccache);
+            set(&mut config.ninja, llvm.ninja);
+            set(&mut config.llvm_assertions, llvm.assertions);
+            set(&mut config.llvm_optimize, llvm.optimize);
+            set(&mut config.llvm_version_check, llvm.version_check);
+            set(&mut config.llvm_static_stdcpp, llvm.static_libstdcpp);
+        }
+        if let Some(ref rust) = toml.rust {
+            set(&mut config.rust_debug_assertions, rust.debug_assertions);
+            set(&mut config.rust_debuginfo, rust.debuginfo);
+            set(&mut config.rust_optimize, rust.optimize);
+            set(&mut config.rust_optimize_tests, rust.optimize_tests);
+            set(&mut config.rust_debuginfo_tests, rust.debuginfo_tests);
+            set(&mut config.rust_rpath, rust.rpath);
+            set(&mut config.debug_jemalloc, rust.debug_jemalloc);
+            set(&mut config.use_jemalloc, rust.use_jemalloc);
+            set(&mut config.channel, rust.channel.clone());
+            config.rustc_default_linker = rust.default_linker.clone();
+            config.rustc_default_ar = rust.default_ar.clone();
+            config.musl_root = rust.musl_root.clone().map(PathBuf::from);
+
+            // codegen-units = 0 means "one unit per CPU".
+            match rust.codegen_units {
+                Some(0) => config.rust_codegen_units = num_cpus::get() as u32,
+                Some(n) => config.rust_codegen_units = n,
+                None => {}
+            }
+        }
+
+        // Per-target tables: relative paths are resolved against the
+        // current working directory.
+        if let Some(ref t) = toml.target {
+            for (triple, cfg) in t {
+                let mut target = Target::default();
+
+                if let Some(ref s) = cfg.llvm_config {
+                    target.llvm_config = Some(env::current_dir().unwrap().join(s));
+                }
+                if let Some(ref s) = cfg.jemalloc {
+                    target.jemalloc = Some(env::current_dir().unwrap().join(s));
+                }
+                if let Some(ref s) = cfg.android_ndk {
+                    target.ndk = Some(env::current_dir().unwrap().join(s));
+                }
+                target.cxx = cfg.cxx.clone().map(PathBuf::from);
+                target.cc = cfg.cc.clone().map(PathBuf::from);
+
+                config.target_config.insert(triple.clone(), target);
+            }
+        }
+
+        return config
+    }
+
+    /// "Temporary" routine to parse `config.mk` into this configuration.
+    ///
+    /// While we still have `./configure` this implements the ability to decode
+    /// that configuration into this. This isn't exactly a full-blown makefile
+    /// parser, but hey it gets the job done!
+    pub fn update_with_config_mk(&mut self) {
+        let mut config = String::new();
+        File::open("config.mk").unwrap().read_to_string(&mut config).unwrap();
+        for line in config.lines() {
+            // Each interesting line looks like `KEY := value`; lines without
+            // `:=` are skipped.
+            let mut parts = line.splitn(2, ":=").map(|s| s.trim());
+            let key = parts.next().unwrap();
+            let value = match parts.next() {
+                // Strip surrounding double quotes from quoted values.
+                Some(n) if n.starts_with('\"') => &n[1..n.len() - 1],
+                Some(n) => n,
+                None => continue
+            };
+
+            // Maps CFG_ENABLE_<NAME>/CFG_DISABLE_<NAME> keys (whose value is
+            // "1") onto the corresponding boolean fields of `self`.
+            macro_rules! check {
+                ($(($name:expr, $val:expr),)*) => {
+                    if value == "1" {
+                        $(
+                            if key == concat!("CFG_ENABLE_", $name) {
+                                $val = true;
+                                continue
+                            }
+                            if key == concat!("CFG_DISABLE_", $name) {
+                                $val = false;
+                                continue
+                            }
+                        )*
+                    }
+                }
+            }
+
+            check! {
+                ("CCACHE", self.ccache),
+                ("MANAGE_SUBMODULES", self.submodules),
+                ("COMPILER_DOCS", self.compiler_docs),
+                ("DOCS", self.docs),
+                ("LLVM_ASSERTIONS", self.llvm_assertions),
+                ("OPTIMIZE_LLVM", self.llvm_optimize),
+                ("LLVM_VERSION_CHECK", self.llvm_version_check),
+                ("LLVM_STATIC_STDCPP", self.llvm_static_stdcpp),
+                ("OPTIMIZE", self.rust_optimize),
+                ("DEBUG_ASSERTIONS", self.rust_debug_assertions),
+                ("DEBUGINFO", self.rust_debuginfo),
+                ("JEMALLOC", self.use_jemalloc),
+                ("DEBUG_JEMALLOC", self.debug_jemalloc),
+                ("RPATH", self.rust_rpath),
+                ("OPTIMIZE_TESTS", self.rust_optimize_tests),
+                ("DEBUGINFO_TESTS", self.rust_debuginfo_tests),
+                ("LOCAL_REBUILD", self.local_rebuild),
+            }
+
+            // Non-boolean keys: triples, paths, and channel/prefix strings.
+            match key {
+                "CFG_BUILD" => self.build = value.to_string(),
+                "CFG_HOST" => {
+                    self.host = value.split(" ").map(|s| s.to_string())
+                                     .collect();
+                }
+                "CFG_TARGET" => {
+                    self.target = value.split(" ").map(|s| s.to_string())
+                                       .collect();
+                }
+                "CFG_MUSL_ROOT" if value.len() > 0 => {
+                    self.musl_root = Some(PathBuf::from(value));
+                }
+                "CFG_DEFAULT_AR" if value.len() > 0 => {
+                    self.rustc_default_ar = Some(value.to_string());
+                }
+                "CFG_DEFAULT_LINKER" if value.len() > 0 => {
+                    self.rustc_default_linker = Some(value.to_string());
+                }
+                "CFG_RELEASE_CHANNEL" => {
+                    self.channel = value.to_string();
+                }
+                "CFG_PREFIX" => {
+                    self.prefix = Some(value.to_string());
+                }
+                "CFG_LLVM_ROOT" if value.len() > 0 => {
+                    let target = self.target_config.entry(self.build.clone())
+                                     .or_insert(Target::default());
+                    let root = PathBuf::from(value);
+                    target.llvm_config = Some(root.join("bin/llvm-config"));
+                }
+                "CFG_JEMALLOC_ROOT" if value.len() > 0 => {
+                    let target = self.target_config.entry(self.build.clone())
+                                     .or_insert(Target::default());
+                    target.jemalloc = Some(PathBuf::from(value));
+                }
+                "CFG_ARM_LINUX_ANDROIDEABI_NDK" if value.len() > 0 => {
+                    let target = "arm-linux-androideabi".to_string();
+                    let target = self.target_config.entry(target)
+                                     .or_insert(Target::default());
+                    target.ndk = Some(PathBuf::from(value));
+                }
+                "CFG_ARMV7_LINUX_ANDROIDEABI_NDK" if value.len() > 0 => {
+                    let target = "armv7-linux-androideabi".to_string();
+                    let target = self.target_config.entry(target)
+                                     .or_insert(Target::default());
+                    target.ndk = Some(PathBuf::from(value));
+                }
+                "CFG_I686_LINUX_ANDROID_NDK" if value.len() > 0 => {
+                    let target = "i686-linux-android".to_string();
+                    let target = self.target_config.entry(target)
+                                     .or_insert(Target::default());
+                    target.ndk = Some(PathBuf::from(value));
+                }
+                "CFG_AARCH64_LINUX_ANDROID_NDK" if value.len() > 0 => {
+                    let target = "aarch64-linux-android".to_string();
+                    let target = self.target_config.entry(target)
+                                     .or_insert(Target::default());
+                    target.ndk = Some(PathBuf::from(value));
+                }
+                "CFG_LOCAL_RUST_ROOT" if value.len() > 0 => {
+                    self.rustc = Some(PathBuf::from(value).join("bin/rustc"));
+                    self.cargo = Some(PathBuf::from(value).join("bin/cargo"));
+                }
+                _ => {}
+            }
+        }
+    }
+}
+
+/// Overwrite `field` with `val` when present; leave it untouched otherwise.
+/// Used to layer optional TOML values over the built-in defaults.
+fn set<T>(field: &mut T, val: Option<T>) {
+    if let Some(v) = val {
+        *field = v;
+    }
+}
diff --git a/src/bootstrap/dist.rs b/src/bootstrap/dist.rs
new file mode 100644 (file)
index 0000000..1cf71c3
--- /dev/null
@@ -0,0 +1,319 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Implementation of the various distribution aspects of the compiler.
+//!
+//! This module is responsible for creating tarballs of the standard library,
+//! compiler, and documentation. This ends up being what we distribute to
+//! everyone as well.
+//!
+//! No tarball is actually created literally in this file, but rather we shell
+//! out to `rust-installer` still. This may one day be replaced with bits and
+//! pieces of `rustup.rs`!
+
+use std::fs::{self, File};
+use std::io::Write;
+use std::path::{PathBuf, Path};
+use std::process::Command;
+
+use {Build, Compiler};
+use util::{cp_r, libdir, is_dylib};
+
+fn package_vers(build: &Build) -> &str {
+    match &build.config.channel[..] {
+        "stable" => &build.release,
+        "beta" => "beta",
+        "nightly" => "nightly",
+        _ => &build.release,
+    }
+}
+
+fn distdir(build: &Build) -> PathBuf {
+    build.out.join("dist")
+}
+
+fn tmpdir(build: &Build) -> PathBuf {
+    build.out.join("tmp/dist")
+}
+
+/// Builds the `rust-docs` installer component.
+///
+/// Slurps up documentation from the `stage`'s `host`.
+pub fn docs(build: &Build, stage: u32, host: &str) {
+    println!("Dist docs stage{} ({})", stage, host);
+    let name = format!("rust-docs-{}", package_vers(build));
+    let image = tmpdir(build).join(format!("{}-{}-image", name, name));
+    let _ = fs::remove_dir_all(&image);
+
+    let dst = image.join("share/doc/rust/html");
+    t!(fs::create_dir_all(&dst));
+    let src = build.out.join(host).join("doc");
+    cp_r(&src, &dst);
+
+    let mut cmd = Command::new("sh");
+    cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
+       .arg("--product-name=Rust-Documentation")
+       .arg("--rel-manifest-dir=rustlib")
+       .arg("--success-message=Rust-documentation-is-installed.")
+       .arg(format!("--image-dir={}", sanitize_sh(&image)))
+       .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
+       .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
+       .arg(format!("--package-name={}-{}", name, host))
+       .arg("--component-name=rust-docs")
+       .arg("--legacy-manifest-dirs=rustlib,cargo")
+       .arg("--bulk-dirs=share/doc/rust/html");
+    build.run(&mut cmd);
+    t!(fs::remove_dir_all(&image));
+
+    // As part of this step, *also* copy the docs directory to a directory which
+    // buildbot typically uploads.
+    if host == build.config.build {
+        let dst = distdir(build).join("doc").join(&build.package_vers);
+        t!(fs::create_dir_all(&dst));
+        cp_r(&src, &dst);
+    }
+}
+
+/// Build the `rust-mingw` installer component.
+///
+/// This contains all the bits and pieces to run the MinGW Windows targets
+/// without any extra installed software (e.g. we bundle gcc, libraries, etc).
+/// Currently just shells out to a python script, but that should be rewritten
+/// in Rust.
+pub fn mingw(build: &Build, host: &str) {
+    println!("Dist mingw ({})", host);
+    let name = format!("rust-mingw-{}", package_vers(build));
+    let image = tmpdir(build).join(format!("{}-{}-image", name, host));
+    let _ = fs::remove_dir_all(&image);
+
+    // The first argument to the script is a "temporary directory" which is just
+    // thrown away (this contains the runtime DLLs included in the rustc package
+    // above) and the second argument is where to place all the MinGW components
+    // (which is what we want).
+    //
+    // FIXME: this script should be rewritten into Rust
+    let mut cmd = Command::new("python");
+    cmd.arg(build.src.join("src/etc/make-win-dist.py"))
+       .arg(tmpdir(build))
+       .arg(&image)
+       .arg(host);
+    build.run(&mut cmd);
+
+    let mut cmd = Command::new("sh");
+    cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
+       .arg("--product-name=Rust-MinGW")
+       .arg("--rel-manifest-dir=rustlib")
+       .arg("--success-message=Rust-MinGW-is-installed.")
+       .arg(format!("--image-dir={}", sanitize_sh(&image)))
+       .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
+       .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
+       .arg(format!("--package-name={}-{}", name, host))
+       .arg("--component-name=rust-mingw")
+       .arg("--legacy-manifest-dirs=rustlib,cargo");
+    build.run(&mut cmd);
+    t!(fs::remove_dir_all(&image));
+}
+
+/// Creates the `rustc` installer component.
+pub fn rustc(build: &Build, stage: u32, host: &str) {
+    println!("Dist rustc stage{} ({})", stage, host);
+    let name = format!("rustc-{}", package_vers(build));
+    let image = tmpdir(build).join(format!("{}-{}-image", name, host));
+    let _ = fs::remove_dir_all(&image);
+    let overlay = tmpdir(build).join(format!("{}-{}-overlay", name, host));
+    let _ = fs::remove_dir_all(&overlay);
+
+    // Prepare the rustc "image", what will actually end up getting installed
+    prepare_image(build, stage, host, &image);
+
+    // Prepare the overlay which is part of the tarball but won't actually be
+    // installed
+    let cp = |file: &str| {
+        install(&build.src.join(file), &overlay, 0o644);
+    };
+    cp("COPYRIGHT");
+    cp("LICENSE-APACHE");
+    cp("LICENSE-MIT");
+    cp("README.md");
+    // tiny morsel of metadata is used by rust-packaging
+    let version = &build.version;
+    t!(t!(File::create(overlay.join("version"))).write_all(version.as_bytes()));
+
+    // On MinGW we've got a few runtime DLL dependencies that we need to
+    // include. The first argument to this script is where to put these DLLs
+    // (the image we're creating), and the second argument is a junk directory
+    // to ignore all other MinGW stuff the script creates.
+    //
+    // On 32-bit MinGW we're always including a DLL which needs some extra
+    // licenses to distribute. On 64-bit MinGW we don't actually distribute
+    // anything requiring us to distribute a license, but it's likely the
+    // install will *also* include the rust-mingw package, which also needs
+    // licenses, so to be safe we just include it here in all MinGW packages.
+    //
+    // FIXME: this script should be rewritten into Rust
+    if host.contains("pc-windows-gnu") {
+        let mut cmd = Command::new("python");
+        cmd.arg(build.src.join("src/etc/make-win-dist.py"))
+           .arg(&image)
+           .arg(tmpdir(build))
+           .arg(host);
+        build.run(&mut cmd);
+
+        let dst = image.join("share/doc");
+        t!(fs::create_dir_all(&dst));
+        cp_r(&build.src.join("src/etc/third-party"), &dst);
+    }
+
+    // Finally, wrap everything up in a nice tarball!
+    let mut cmd = Command::new("sh");
+    cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
+       .arg("--product-name=Rust")
+       .arg("--rel-manifest-dir=rustlib")
+       .arg("--success-message=Rust-is-ready-to-roll.")
+       .arg(format!("--image-dir={}", sanitize_sh(&image)))
+       .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
+       .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
+       .arg(format!("--non-installed-overlay={}", sanitize_sh(&overlay)))
+       .arg(format!("--package-name={}-{}", name, host))
+       .arg("--component-name=rustc")
+       .arg("--legacy-manifest-dirs=rustlib,cargo");
+    build.run(&mut cmd);
+    t!(fs::remove_dir_all(&image));
+    t!(fs::remove_dir_all(&overlay));
+
+    fn prepare_image(build: &Build, stage: u32, host: &str, image: &Path) {
+        let src = build.sysroot(&Compiler::new(stage, host));
+        let libdir = libdir(host);
+
+        // Copy rustc/rustdoc binaries
+        t!(fs::create_dir_all(image.join("bin")));
+        cp_r(&src.join("bin"), &image.join("bin"));
+
+        // Copy runtime DLLs needed by the compiler
+        if libdir != "bin" {
+            for entry in t!(src.join(libdir).read_dir()).map(|e| t!(e)) {
+                let name = entry.file_name();
+                if let Some(s) = name.to_str() {
+                    if is_dylib(s) {
+                        install(&entry.path(), &image.join(libdir), 0o644);
+                    }
+                }
+            }
+        }
+
+        // Man pages
+        t!(fs::create_dir_all(image.join("share/man/man1")));
+        cp_r(&build.src.join("man"), &image.join("share/man/man1"));
+
+        // Debugger scripts
+        debugger_scripts(build, &image, host);
+
+        // Misc license info
+        let cp = |file: &str| {
+            install(&build.src.join(file), &image.join("share/doc/rust"), 0o644);
+        };
+        cp("COPYRIGHT");
+        cp("LICENSE-APACHE");
+        cp("LICENSE-MIT");
+        cp("README.md");
+    }
+}
+
+/// Copies debugger scripts for `host` into the `sysroot` specified.
+pub fn debugger_scripts(build: &Build,
+                        sysroot: &Path,
+                        host: &str) {
+    let cp_debugger_script = |file: &str| {
+        let dst = sysroot.join("lib/rustlib/etc");
+        t!(fs::create_dir_all(&dst));
+        install(&build.src.join("src/etc/").join(file), &dst, 0o644);
+    };
+    if host.contains("windows-msvc") {
+        // no debugger scripts
+    } else {
+        cp_debugger_script("debugger_pretty_printers_common.py");
+
+        // gdb debugger scripts
+        install(&build.src.join("src/etc/rust-gdb"), &sysroot.join("bin"),
+                0o755);
+
+        cp_debugger_script("gdb_load_rust_pretty_printers.py");
+        cp_debugger_script("gdb_rust_pretty_printing.py");
+
+        // lldb debugger scripts
+        install(&build.src.join("src/etc/rust-lldb"), &sysroot.join("bin"),
+                0o755);
+
+        cp_debugger_script("lldb_rust_formatters.py");
+    }
+}
+
+/// Creates the `rust-std` installer component as compiled by `compiler` for the
+/// target `target`.
+pub fn std(build: &Build, compiler: &Compiler, target: &str) {
+    println!("Dist std stage{} ({} -> {})", compiler.stage, compiler.host,
+             target);
+    let name = format!("rust-std-{}", package_vers(build));
+    let image = tmpdir(build).join(format!("{}-{}-image", name, target));
+    let _ = fs::remove_dir_all(&image);
+
+    let dst = image.join("lib/rustlib").join(target);
+    t!(fs::create_dir_all(&dst));
+    let src = build.sysroot(compiler).join("lib/rustlib");
+    cp_r(&src.join(target), &dst);
+
+    let mut cmd = Command::new("sh");
+    cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
+       .arg("--product-name=Rust")
+       .arg("--rel-manifest-dir=rustlib")
+       .arg("--success-message=std-is-standing-at-the-ready.")
+       .arg(format!("--image-dir={}", sanitize_sh(&image)))
+       .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
+       .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
+       .arg(format!("--package-name={}-{}", name, target))
+       .arg(format!("--component-name=rust-std-{}", target))
+       .arg("--legacy-manifest-dirs=rustlib,cargo");
+    build.run(&mut cmd);
+    t!(fs::remove_dir_all(&image));
+}
+
+fn install(src: &Path, dstdir: &Path, perms: u32) {
+    let dst = dstdir.join(src.file_name().unwrap());
+    t!(fs::create_dir_all(dstdir));
+    t!(fs::copy(src, &dst));
+    chmod(&dst, perms);
+}
+
+#[cfg(unix)]
+fn chmod(path: &Path, perms: u32) {
+    use std::os::unix::fs::*;
+    t!(fs::set_permissions(path, fs::Permissions::from_mode(perms)));
+}
+#[cfg(windows)]
+fn chmod(_path: &Path, _perms: u32) {}
+
+// We have to run a few shell scripts, which choke quite a bit on both `\`
+// characters and on `C:\` paths, so normalize both of them away.
+fn sanitize_sh(path: &Path) -> String {
+    let path = path.to_str().unwrap().replace("\\", "/");
+    return change_drive(&path).unwrap_or(path);
+
+    fn change_drive(s: &str) -> Option<String> {
+        let mut ch = s.chars();
+        let drive = ch.next().unwrap_or('C');
+        if ch.next() != Some(':') {
+            return None
+        }
+        if ch.next() != Some('/') {
+            return None
+        }
+        Some(format!("/{}/{}", drive, &s[drive.len_utf8() + 2..]))
+    }
+}
diff --git a/src/bootstrap/doc.rs b/src/bootstrap/doc.rs
new file mode 100644 (file)
index 0000000..ac90ab5
--- /dev/null
@@ -0,0 +1,207 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Documentation generation for rustbuild.
+//!
+//! This module implements generation for all bits and pieces of documentation
+//! for the Rust project. This notably includes suites like the rust book, the
+//! nomicon, standalone documentation, etc.
+//!
+//! Everything here is basically just a shim around calling either `rustbook` or
+//! `rustdoc`.
+
+use std::fs::{self, File};
+use std::io::prelude::*;
+use std::path::Path;
+use std::process::Command;
+
+use {Build, Compiler, Mode};
+use util::{up_to_date, cp_r};
+
+/// Invoke `rustbook` as compiled in `stage` for `target` for the doc book
+/// `name` into the `out` path.
+///
+/// This will not actually generate any documentation if the documentation has
+/// already been generated.
+pub fn rustbook(build: &Build, stage: u32, target: &str, name: &str, out: &Path) {
+    t!(fs::create_dir_all(out));
+
+    let out = out.join(name);
+    let compiler = Compiler::new(stage, &build.config.build);
+    let src = build.src.join("src/doc").join(name);
+    let index = out.join("index.html");
+    let rustbook = build.tool(&compiler, "rustbook");
+    if up_to_date(&src, &index) && up_to_date(&rustbook, &index) {
+        return
+    }
+    println!("Rustbook stage{} ({}) - {}", stage, target, name);
+    let _ = fs::remove_dir_all(&out);
+    build.run(build.tool_cmd(&compiler, "rustbook")
+                   .arg("build")
+                   .arg(&src)
+                   .arg(out));
+}
+
+/// Generates all standalone documentation as compiled by the rustdoc in `stage`
+/// for the `target` into `out`.
+///
+/// This will list all of `src/doc` looking for markdown files and appropriately
+/// perform transformations like substituting `VERSION`, `SHORT_HASH`, and
+/// `STAMP` along with providing the various header/footer HTML we've customized.
+///
+/// In the end, this is just a glorified wrapper around rustdoc!
+pub fn standalone(build: &Build, stage: u32, target: &str, out: &Path) {
+    println!("Documenting stage{} standalone ({})", stage, target);
+    t!(fs::create_dir_all(out));
+
+    let compiler = Compiler::new(stage, &build.config.build);
+
+    let favicon = build.src.join("src/doc/favicon.inc");
+    let footer = build.src.join("src/doc/footer.inc");
+    let full_toc = build.src.join("src/doc/full-toc.inc");
+    t!(fs::copy(build.src.join("src/doc/rust.css"), out.join("rust.css")));
+
+    let version_input = build.src.join("src/doc/version_info.html.template");
+    let version_info = out.join("version_info.html");
+
+    if !up_to_date(&version_input, &version_info) {
+        let mut info = String::new();
+        t!(t!(File::open(&version_input)).read_to_string(&mut info));
+        let blank = String::new();
+        let short = build.short_ver_hash.as_ref().unwrap_or(&blank);
+        let hash = build.ver_hash.as_ref().unwrap_or(&blank);
+        let info = info.replace("VERSION", &build.release)
+                       .replace("SHORT_HASH", short)
+                       .replace("STAMP", hash);
+        t!(t!(File::create(&version_info)).write_all(info.as_bytes()));
+    }
+
+    for file in t!(fs::read_dir(build.src.join("src/doc"))) {
+        let file = t!(file);
+        let path = file.path();
+        let filename = path.file_name().unwrap().to_str().unwrap();
+        if !filename.ends_with(".md") || filename == "README.md" {
+            continue
+        }
+
+        let html = out.join(filename).with_extension("html");
+        let rustdoc = build.rustdoc(&compiler);
+        if up_to_date(&path, &html) &&
+           up_to_date(&footer, &html) &&
+           up_to_date(&favicon, &html) &&
+           up_to_date(&full_toc, &html) &&
+           up_to_date(&version_info, &html) &&
+           up_to_date(&rustdoc, &html) {
+            continue
+        }
+
+        let mut cmd = Command::new(&rustdoc);
+        build.add_rustc_lib_path(&compiler, &mut cmd);
+        cmd.arg("--html-after-content").arg(&footer)
+           .arg("--html-before-content").arg(&version_info)
+           .arg("--html-in-header").arg(&favicon)
+           .arg("--markdown-playground-url")
+           .arg("https://play.rust-lang.org/")
+           .arg("-o").arg(out)
+           .arg(&path);
+
+        if filename == "reference.md" {
+           cmd.arg("--html-in-header").arg(&full_toc);
+        }
+
+        if filename == "not_found.md" {
+            cmd.arg("--markdown-no-toc")
+               .arg("--markdown-css")
+               .arg("https://doc.rust-lang.org/rust.css");
+        } else {
+            cmd.arg("--markdown-css").arg("rust.css");
+        }
+        build.run(&mut cmd);
+    }
+}
+
+/// Compile all standard library documentation.
+///
+/// This will generate all documentation for the standard library and its
+/// dependencies. This is largely just a wrapper around `cargo doc`.
+pub fn std(build: &Build, stage: u32, target: &str, out: &Path) {
+    println!("Documenting stage{} std ({})", stage, target);
+    t!(fs::create_dir_all(out));
+    let compiler = Compiler::new(stage, &build.config.build);
+    let out_dir = build.stage_out(&compiler, Mode::Libstd)
+                       .join(target).join("doc");
+    let rustdoc = build.rustdoc(&compiler);
+
+    build.clear_if_dirty(&out_dir, &rustdoc);
+
+    let mut cargo = build.cargo(&compiler, Mode::Libstd, target, "doc");
+    cargo.arg("--manifest-path")
+         .arg(build.src.join("src/rustc/std_shim/Cargo.toml"))
+         .arg("--features").arg(build.std_features());
+    build.run(&mut cargo);
+    cp_r(&out_dir, out)
+}
+
+/// Compile all libtest documentation.
+///
+/// This will generate all documentation for libtest and its dependencies. This
+/// is largely just a wrapper around `cargo doc`.
+pub fn test(build: &Build, stage: u32, target: &str, out: &Path) {
+    println!("Documenting stage{} test ({})", stage, target);
+    let compiler = Compiler::new(stage, &build.config.build);
+    let out_dir = build.stage_out(&compiler, Mode::Libtest)
+                       .join(target).join("doc");
+    let rustdoc = build.rustdoc(&compiler);
+
+    build.clear_if_dirty(&out_dir, &rustdoc);
+
+    let mut cargo = build.cargo(&compiler, Mode::Libtest, target, "doc");
+    cargo.arg("--manifest-path")
+         .arg(build.src.join("src/rustc/test_shim/Cargo.toml"));
+    build.run(&mut cargo);
+    cp_r(&out_dir, out)
+}
+
+/// Generate all compiler documentation.
+///
+/// This will generate all documentation for the compiler libraries and their
+/// dependencies. This is largely just a wrapper around `cargo doc`.
+pub fn rustc(build: &Build, stage: u32, target: &str, out: &Path) {
+    println!("Documenting stage{} compiler ({})", stage, target);
+    let compiler = Compiler::new(stage, &build.config.build);
+    let out_dir = build.stage_out(&compiler, Mode::Librustc)
+                       .join(target).join("doc");
+    let rustdoc = build.rustdoc(&compiler);
+    if !up_to_date(&rustdoc, &out_dir.join("rustc/index.html")) {
+        t!(fs::remove_dir_all(&out_dir));
+    }
+    let mut cargo = build.cargo(&compiler, Mode::Librustc, target, "doc");
+    cargo.arg("--manifest-path")
+         .arg(build.src.join("src/rustc/Cargo.toml"))
+         .arg("--features").arg(build.rustc_features());
+    build.run(&mut cargo);
+    cp_r(&out_dir, out)
+}
+
+/// Generates the HTML rendered error-index by running the
+/// `error_index_generator` tool.
+pub fn error_index(build: &Build, stage: u32, target: &str, out: &Path) {
+    println!("Documenting stage{} error index ({})", stage, target);
+    t!(fs::create_dir_all(out));
+    let compiler = Compiler::new(stage, &build.config.build);
+    let mut index = build.tool_cmd(&compiler, "error_index_generator");
+    index.arg("html");
+    index.arg(out.join("error-index.html"));
+
+    // FIXME: shouldn't have to pass this env var
+    index.env("CFG_BUILD", &build.config.build);
+
+    build.run(&mut index);
+}
diff --git a/src/bootstrap/flags.rs b/src/bootstrap/flags.rs
new file mode 100644 (file)
index 0000000..d925997
--- /dev/null
@@ -0,0 +1,103 @@
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Command-line interface of the rustbuild build system.
+//!
+//! This module implements the command-line parsing of the build system which
+//! has various flags to configure how it's run.
+
+use std::fs;
+use std::path::PathBuf;
+use std::process;
+use std::slice;
+
+use getopts::Options;
+
+/// Deserialized version of all flags for this compile.
+pub struct Flags {
+    pub verbose: bool,
+    pub stage: Option<u32>,
+    pub build: String,
+    pub host: Filter,
+    pub target: Filter,
+    pub step: Vec<String>,
+    pub config: Option<PathBuf>,
+    pub src: Option<PathBuf>,
+    pub jobs: Option<u32>,
+    pub args: Vec<String>,
+    pub clean: bool,
+}
+
+pub struct Filter {
+    values: Vec<String>,
+}
+
+impl Flags {
+    pub fn parse(args: &[String]) -> Flags {
+        let mut opts = Options::new();
+        opts.optflag("v", "verbose", "use verbose output");
+        opts.optopt("", "config", "TOML configuration file for build", "FILE");
+        opts.optmulti("", "host", "host targets to build", "HOST");
+        opts.reqopt("", "build", "build target of the stage0 compiler", "BUILD");
+        opts.optmulti("", "target", "targets to build", "TARGET");
+        opts.optmulti("s", "step", "build step to execute", "STEP");
+        opts.optopt("", "stage", "stage to build", "N");
+        opts.optopt("", "src", "path to repo root", "DIR");
+        opts.optopt("j", "jobs", "number of jobs to run in parallel", "JOBS");
+        opts.optflag("", "clean", "clean output directory");
+        opts.optflag("h", "help", "print this help message");
+
+        let usage = |n| -> ! {
+            let brief = format!("Usage: rust.py [options]");
+            print!("{}", opts.usage(&brief));
+            process::exit(n);
+        };
+
+        let m = opts.parse(args).unwrap_or_else(|e| {
+            println!("failed to parse options: {}", e);
+            usage(1);
+        });
+        if m.opt_present("h") {
+            usage(0);
+        }
+
+        let cfg_file = m.opt_str("config").map(PathBuf::from).or_else(|| {
+            if fs::metadata("config.toml").is_ok() {
+                Some(PathBuf::from("config.toml"))
+            } else {
+                None
+            }
+        });
+
+        Flags {
+            verbose: m.opt_present("v"),
+            clean: m.opt_present("clean"),
+            stage: m.opt_str("stage").map(|j| j.parse().unwrap()),
+            build: m.opt_str("build").unwrap(),
+            host: Filter { values: m.opt_strs("host") },
+            target: Filter { values: m.opt_strs("target") },
+            step: m.opt_strs("step"),
+            config: cfg_file,
+            src: m.opt_str("src").map(PathBuf::from),
+            jobs: m.opt_str("jobs").map(|j| j.parse().unwrap()),
+            args: m.free.clone(),
+        }
+    }
+}
+
+impl Filter {
+    pub fn contains(&self, name: &str) -> bool {
+        self.values.len() == 0 || self.values.iter().any(|s| s == name)
+    }
+
+    pub fn iter(&self) -> slice::Iter<String> {
+        self.values.iter()
+    }
+}
diff --git a/src/bootstrap/job.rs b/src/bootstrap/job.rs
new file mode 100644 (file)
index 0000000..4558e6f
--- /dev/null
@@ -0,0 +1,111 @@
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Job management on Windows for bootstrapping
+//!
+//! Most of the time when you're running a build system (e.g. make) you expect
+//! Ctrl-C or abnormal termination to actually terminate the entire tree of
+//! processes in play, not just the one at the top. This currently works "by
+//! default" on Unix platforms because Ctrl-C actually sends a signal to the
+//! *process group* rather than the parent process, so everything will get torn
+//! down. On Windows, however, this does not happen and Ctrl-C just kills the
+//! parent process.
+//!
+//! To achieve the same semantics on Windows we use Job Objects to ensure that
+//! all processes die at the same time. Job objects have a mode of operation
+//! where when all handles to the object are closed it causes all child
+//! processes associated with the object to be terminated immediately.
+//! Conveniently whenever a process in the job object spawns a new process the
+//! child will be associated with the job object as well. This means if we add
+//! ourselves to the job object we create then everything will get torn down!
+//!
+//! Unfortunately most of the time the build system is actually called from a
+//! python wrapper (which manages things like building the build system) so this
+//! all doesn't quite cut it so far. To go the last mile we duplicate the job
+//! object handle into our parent process (a python process probably) and then
+//! close our own handle. This means that the only handle to the job object
+//! resides in the parent python process, so when python dies the whole build
+//! system dies (as one would probably expect!).
+//!
+//! Note that this module has a #[cfg(windows)] above it as none of this logic
+//! is required on Unix.
+
+extern crate kernel32;
+extern crate winapi;
+
+use std::env;
+use std::io;
+use std::mem;
+
+use self::winapi::*;
+use self::kernel32::*;
+
+pub unsafe fn setup() {
+    // Create a new job object for us to use
+    let job = CreateJobObjectW(0 as *mut _, 0 as *const _);
+    assert!(job != 0 as *mut _, "{}", io::Error::last_os_error());
+
+    // Indicate that when all handles to the job object are gone that all
+    // processes in the object should be killed. Note that this includes our
+    // entire process tree by default because we've added ourselves and our
+    // children will reside in the job by default.
+    let mut info = mem::zeroed::<JOBOBJECT_EXTENDED_LIMIT_INFORMATION>();
+    info.BasicLimitInformation.LimitFlags = JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE;
+    let r = SetInformationJobObject(job,
+                                    JobObjectExtendedLimitInformation,
+                                    &mut info as *mut _ as LPVOID,
+                                    mem::size_of_val(&info) as DWORD);
+    assert!(r != 0, "{}", io::Error::last_os_error());
+
+    // Assign our process to this job object. Note that if this fails, one very
+    // likely reason is that we are ourselves already in a job object! This can
+    // happen on the build bots that we've got for Windows, or if just anyone
+    // else is instrumenting the build. In this case we just bail out
+    // immediately and assume that they take care of it.
+    //
+    // Also note that nested jobs (why this might fail) are supported in recent
+    // versions of Windows, but the version of Windows that our bots are running
+    // at least don't support nested job objects.
+    let r = AssignProcessToJobObject(job, GetCurrentProcess());
+    if r == 0 {
+        CloseHandle(job);
+        return
+    }
+
+    // If we've got a parent process (e.g. the python script that called us)
+    // then move ownership of this job object up to them. That way if the python
+    // script is killed (e.g. via ctrl-c) then we'll all be torn down.
+    //
+    // If we don't have a parent (e.g. this was run directly) then we
+    // intentionally leak the job object handle. When our process exits
+    // (normally or abnormally) it will close the handle implicitly, causing all
+    // processes in the job to be cleaned up.
+    let pid = match env::var("BOOTSTRAP_PARENT_ID") {
+        Ok(s) => s,
+        Err(..) => return,
+    };
+
+    let parent = OpenProcess(PROCESS_DUP_HANDLE, FALSE, pid.parse().unwrap());
+    assert!(parent != 0 as *mut _, "{}", io::Error::last_os_error());
+    let mut parent_handle = 0 as *mut _;
+    let r = DuplicateHandle(GetCurrentProcess(), job,
+                            parent, &mut parent_handle,
+                            0, FALSE, DUPLICATE_SAME_ACCESS);
+
+    // If this failed, well at least we tried! An example of DuplicateHandle
+    // failing in the past has been when the wrong python2 package spawned this
+    // build system (e.g. the `python2` package in MSYS instead of
+    // `mingw-w64-x86_64-python2`). Not sure why it failed, but the "failure
+    // mode" here is that we only clean everything up when the build system
+    // dies, not when the python parent does, so not too bad.
+    if r != 0 {
+        CloseHandle(job);
+    }
+}
index ef6184d6ca76cf8fc3dbbb6b40e6dd0452a75098..943271fc8a641665734531b3393b32d4f37d1e5e 100644 (file)
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-//! A small helper library shared between the build system's executables
+//! Implementation of rustbuild, the Rust build system.
 //!
-//! Currently this just has some simple utilities for modifying the dynamic
-//! library lookup path.
+//! This module, and its descendants, are the implementation of the Rust build
+//! system. Most of this build system is backed by Cargo but the outer layer
+//! here serves as the ability to orchestrate calling Cargo, sequencing Cargo
+//! builds, building artifacts like LLVM, etc.
+//!
+//! More documentation can be found in each respective module below.
+
+extern crate build_helper;
+extern crate cmake;
+extern crate filetime;
+extern crate gcc;
+extern crate getopts;
+extern crate md5;
+extern crate num_cpus;
+extern crate rustc_serialize;
+extern crate toml;
 
+use std::cell::RefCell;
+use std::collections::HashMap;
 use std::env;
-use std::ffi::OsString;
-use std::path::PathBuf;
-
-/// Returns the environment variable which the dynamic library lookup path
-/// resides in for this platform.
-pub fn dylib_path_var() -> &'static str {
-    if cfg!(target_os = "windows") {
-        "PATH"
-    } else if cfg!(target_os = "macos") {
-        "DYLD_LIBRARY_PATH"
-    } else {
-        "LD_LIBRARY_PATH"
+use std::fs::{self, File};
+use std::path::{PathBuf, Path};
+use std::process::Command;
+
+use build_helper::{run_silent, output};
+
+use util::{exe, mtime, libdir, add_lib_path};
+
+/// A helper macro to `unwrap` a result except also print out details like:
+///
+/// * The file/line of the panic
+/// * The expression that failed
+/// * The error itself
+///
+/// This is currently used judiciously throughout the build system rather than
+/// using a `Result` with `try!`, but this may change on day...
+macro_rules! t {
+    ($e:expr) => (match $e {
+        Ok(e) => e,
+        Err(e) => panic!("{} failed with {}", stringify!($e), e),
+    })
+}
+
+mod cc;
+mod channel;
+mod check;
+mod clean;
+mod compile;
+mod config;
+mod dist;
+mod doc;
+mod flags;
+mod native;
+mod sanity;
+mod step;
+pub mod util;
+
+#[cfg(windows)]
+mod job;
+
+#[cfg(not(windows))]
+mod job {
+    pub unsafe fn setup() {}
+}
+
+pub use config::Config;
+pub use flags::Flags;
+
+/// A structure representing a Rust compiler.
+///
+/// Each compiler has a `stage` that it is associated with and a `host` that
+/// corresponds to the platform the compiler runs on. This structure is used as
+/// a parameter to many methods below.
+#[derive(Eq, PartialEq, Clone, Copy, Hash, Debug)]
+pub struct Compiler<'a> {
+    stage: u32,
+    host: &'a str,
+}
+
+/// Global configuration for the build system.
+///
+/// This structure transitively contains all configuration for the build system.
+/// All filesystem-encoded configuration is in `config`, all flags are in
+/// `flags`, and then parsed or probed information is listed in the keys below.
+///
+/// This structure is a parameter of almost all methods in the build system,
+/// although most functions are implemented as free functions rather than
+/// methods specifically on this structure itself (to make it easier to
+/// organize).
+pub struct Build {
+    // User-specified configuration via config.toml
+    config: Config,
+
+    // User-specified configuration via CLI flags
+    flags: Flags,
+
+    // Derived properties from the above two configurations
+    cargo: PathBuf,
+    rustc: PathBuf,
+    src: PathBuf,
+    out: PathBuf,
+    release: String,
+    unstable_features: bool,
+    ver_hash: Option<String>,
+    short_ver_hash: Option<String>,
+    ver_date: Option<String>,
+    version: String,
+    package_vers: String,
+    bootstrap_key: String,
+    bootstrap_key_stage0: String,
+
+    // Probed tools at runtime
+    gdb_version: Option<String>,
+    lldb_version: Option<String>,
+    lldb_python_dir: Option<String>,
+
+    // Runtime state filled in later on
+    cc: HashMap<String, (gcc::Tool, Option<PathBuf>)>,
+    cxx: HashMap<String, gcc::Tool>,
+    compiler_rt_built: RefCell<HashMap<String, PathBuf>>,
+}
+
+/// The various "modes" of invoking Cargo.
+///
+/// These entries currently correspond to the various output directories of the
+/// build system, with each mode generating output in a different directory.
+#[derive(Clone, Copy)]
+pub enum Mode {
+    /// This cargo is going to build the standard library, placing output in the
+    /// "stageN-std" directory.
+    Libstd,
+
+    /// This cargo is going to build libtest, placing output in the
+    /// "stageN-test" directory.
+    Libtest,
+
+    /// This cargo is going to build librustc and compiler libraries, placing
+    /// output in the "stageN-rustc" directory.
+    Librustc,
+
+    /// This cargo is going to build some tool, placing output in the
+    /// "stageN-tools" directory.
+    Tool,
+}
+
+impl Build {
+    /// Creates a new set of build configuration from the `flags` on the command
+    /// line and the filesystem `config`.
+    ///
+    /// By default all build output will be placed in the current directory.
+    pub fn new(flags: Flags, config: Config) -> Build {
+        let cwd = t!(env::current_dir());
+        let src = flags.src.clone().unwrap_or(cwd.clone());
+        let out = cwd.join("build");
+
+        let stage0_root = out.join(&config.build).join("stage0/bin");
+        let rustc = match config.rustc {
+            Some(ref s) => PathBuf::from(s),
+            None => stage0_root.join(exe("rustc", &config.build)),
+        };
+        let cargo = match config.cargo {
+            Some(ref s) => PathBuf::from(s),
+            None => stage0_root.join(exe("cargo", &config.build)),
+        };
+
+        Build {
+            flags: flags,
+            config: config,
+            cargo: cargo,
+            rustc: rustc,
+            src: src,
+            out: out,
+
+            release: String::new(),
+            unstable_features: false,
+            ver_hash: None,
+            short_ver_hash: None,
+            ver_date: None,
+            version: String::new(),
+            bootstrap_key: String::new(),
+            bootstrap_key_stage0: String::new(),
+            package_vers: String::new(),
+            cc: HashMap::new(),
+            cxx: HashMap::new(),
+            compiler_rt_built: RefCell::new(HashMap::new()),
+            gdb_version: None,
+            lldb_version: None,
+            lldb_python_dir: None,
+        }
+    }
+
+    /// Executes the entire build, as configured by the flags and configuration.
+    pub fn build(&mut self) {
+        use step::Source::*;
+
+        unsafe {
+            job::setup();
+        }
+
+        if self.flags.clean {
+            return clean::clean(self);
+        }
+
+        self.verbose("finding compilers");
+        cc::find(self);
+        self.verbose("running sanity check");
+        sanity::check(self);
+        self.verbose("collecting channel variables");
+        channel::collect(self);
+        self.verbose("updating submodules");
+        self.update_submodules();
+
+        // The main loop of the build system.
+        //
+        // The `step::all` function returns a topologically sorted list of all
+        // steps that need to be executed as part of this build. Each step has a
+        // corresponding entry in `step.rs` and indicates some unit of work that
+        // needs to be done as part of the build.
+        //
+        // Almost all of these are simple one-liners that shell out to the
+        // corresponding functionality in the extra modules, where more
+        // documentation can be found.
+        for target in step::all(self) {
+            let doc_out = self.out.join(&target.target).join("doc");
+            match target.src {
+                Llvm { _dummy } => {
+                    native::llvm(self, target.target);
+                }
+                CompilerRt { _dummy } => {
+                    native::compiler_rt(self, target.target);
+                }
+                TestHelpers { _dummy } => {
+                    native::test_helpers(self, target.target);
+                }
+                Libstd { compiler } => {
+                    compile::std(self, target.target, &compiler);
+                }
+                Libtest { compiler } => {
+                    compile::test(self, target.target, &compiler);
+                }
+                Librustc { compiler } => {
+                    compile::rustc(self, target.target, &compiler);
+                }
+                LibstdLink { compiler, host } => {
+                    compile::std_link(self, target.target, &compiler, host);
+                }
+                LibtestLink { compiler, host } => {
+                    compile::test_link(self, target.target, &compiler, host);
+                }
+                LibrustcLink { compiler, host } => {
+                    compile::rustc_link(self, target.target, &compiler, host);
+                }
+                Rustc { stage: 0 } => {
+                    // nothing to do...
+                }
+                Rustc { stage } => {
+                    compile::assemble_rustc(self, stage, target.target);
+                }
+                ToolLinkchecker { stage } => {
+                    compile::tool(self, stage, target.target, "linkchecker");
+                }
+                ToolRustbook { stage } => {
+                    compile::tool(self, stage, target.target, "rustbook");
+                }
+                ToolErrorIndex { stage } => {
+                    compile::tool(self, stage, target.target,
+                                  "error_index_generator");
+                }
+                ToolCargoTest { stage } => {
+                    compile::tool(self, stage, target.target, "cargotest");
+                }
+                ToolTidy { stage } => {
+                    compile::tool(self, stage, target.target, "tidy");
+                }
+                ToolCompiletest { stage } => {
+                    compile::tool(self, stage, target.target, "compiletest");
+                }
+                DocBook { stage } => {
+                    doc::rustbook(self, stage, target.target, "book", &doc_out);
+                }
+                DocNomicon { stage } => {
+                    doc::rustbook(self, stage, target.target, "nomicon",
+                                  &doc_out);
+                }
+                DocStyle { stage } => {
+                    doc::rustbook(self, stage, target.target, "style",
+                                  &doc_out);
+                }
+                DocStandalone { stage } => {
+                    doc::standalone(self, stage, target.target, &doc_out);
+                }
+                DocStd { stage } => {
+                    doc::std(self, stage, target.target, &doc_out);
+                }
+                DocTest { stage } => {
+                    doc::test(self, stage, target.target, &doc_out);
+                }
+                DocRustc { stage } => {
+                    doc::rustc(self, stage, target.target, &doc_out);
+                }
+                DocErrorIndex { stage } => {
+                    doc::error_index(self, stage, target.target, &doc_out);
+                }
+
+                CheckLinkcheck { stage } => {
+                    check::linkcheck(self, stage, target.target);
+                }
+                CheckCargoTest { stage } => {
+                    check::cargotest(self, stage, target.target);
+                }
+                CheckTidy { stage } => {
+                    check::tidy(self, stage, target.target);
+                }
+                CheckRPass { compiler } => {
+                    check::compiletest(self, &compiler, target.target,
+                                       "run-pass", "run-pass");
+                }
+                CheckRPassFull { compiler } => {
+                    check::compiletest(self, &compiler, target.target,
+                                       "run-pass", "run-pass-fulldeps");
+                }
+                CheckCFail { compiler } => {
+                    check::compiletest(self, &compiler, target.target,
+                                       "compile-fail", "compile-fail");
+                }
+                CheckCFailFull { compiler } => {
+                    check::compiletest(self, &compiler, target.target,
+                                       "compile-fail", "compile-fail-fulldeps")
+                }
+                CheckPFail { compiler } => {
+                    check::compiletest(self, &compiler, target.target,
+                                       "parse-fail", "parse-fail");
+                }
+                CheckRFail { compiler } => {
+                    check::compiletest(self, &compiler, target.target,
+                                       "run-fail", "run-fail");
+                }
+                CheckRFailFull { compiler } => {
+                    check::compiletest(self, &compiler, target.target,
+                                       "run-fail", "run-fail-fulldeps");
+                }
+                CheckPretty { compiler } => {
+                    check::compiletest(self, &compiler, target.target,
+                                       "pretty", "pretty");
+                }
+                CheckPrettyRPass { compiler } => {
+                    check::compiletest(self, &compiler, target.target,
+                                       "pretty", "run-pass");
+                }
+                CheckPrettyRPassFull { compiler } => {
+                    check::compiletest(self, &compiler, target.target,
+                                       "pretty", "run-pass-fulldeps");
+                }
+                CheckPrettyRFail { compiler } => {
+                    check::compiletest(self, &compiler, target.target,
+                                       "pretty", "run-fail");
+                }
+                CheckPrettyRFailFull { compiler } => {
+                    check::compiletest(self, &compiler, target.target,
+                                       "pretty", "run-fail-fulldeps");
+                }
+                CheckPrettyRPassValgrind { compiler } => {
+                    check::compiletest(self, &compiler, target.target,
+                                       "pretty", "run-pass-valgrind");
+                }
+                CheckCodegen { compiler } => {
+                    check::compiletest(self, &compiler, target.target,
+                                       "codegen", "codegen");
+                }
+                CheckCodegenUnits { compiler } => {
+                    check::compiletest(self, &compiler, target.target,
+                                       "codegen-units", "codegen-units");
+                }
+                CheckIncremental { compiler } => {
+                    check::compiletest(self, &compiler, target.target,
+                                       "incremental", "incremental");
+                }
+                CheckUi { compiler } => {
+                    check::compiletest(self, &compiler, target.target,
+                                       "ui", "ui");
+                }
+                CheckDebuginfo { compiler } => {
+                    if target.target.contains("msvc") {
+                        // nothing to do
+                    } else if target.target.contains("apple") {
+                        check::compiletest(self, &compiler, target.target,
+                                           "debuginfo-lldb", "debuginfo");
+                    } else {
+                        check::compiletest(self, &compiler, target.target,
+                                           "debuginfo-gdb", "debuginfo");
+                    }
+                }
+                CheckRustdoc { compiler } => {
+                    check::compiletest(self, &compiler, target.target,
+                                       "rustdoc", "rustdoc");
+                }
+                CheckRPassValgrind { compiler } => {
+                    check::compiletest(self, &compiler, target.target,
+                                       "run-pass-valgrind", "run-pass-valgrind");
+                }
+                CheckDocs { compiler } => {
+                    check::docs(self, &compiler);
+                }
+                CheckErrorIndex { compiler } => {
+                    check::error_index(self, &compiler);
+                }
+                CheckRMake { compiler } => {
+                    check::compiletest(self, &compiler, target.target,
+                                       "run-make", "run-make")
+                }
+                CheckCrateStd { compiler } => {
+                    check::krate(self, &compiler, target.target, Mode::Libstd)
+                }
+                CheckCrateTest { compiler } => {
+                    check::krate(self, &compiler, target.target, Mode::Libtest)
+                }
+                CheckCrateRustc { compiler } => {
+                    check::krate(self, &compiler, target.target, Mode::Librustc)
+                }
+
+                DistDocs { stage } => dist::docs(self, stage, target.target),
+                DistMingw { _dummy } => dist::mingw(self, target.target),
+                DistRustc { stage } => dist::rustc(self, stage, target.target),
+                DistStd { compiler } => dist::std(self, &compiler, target.target),
+
+                DebuggerScripts { stage } => {
+                    let compiler = Compiler::new(stage, target.target);
+                    dist::debugger_scripts(self,
+                                           &self.sysroot(&compiler),
+                                           target.target);
+                }
+
+                AndroidCopyLibs { compiler } => {
+                    check::android_copy_libs(self, &compiler, target.target);
+                }
+
+                // pseudo-steps
+                Dist { .. } |
+                Doc { .. } |
+                CheckTarget { .. } |
+                Check { .. } => {}
+            }
+        }
+    }
+
+    /// Updates all git submodules that we have.
+    ///
+    /// This will detect if any submodules are out of date and run the necessary
+    /// commands to sync them all with upstream.
+    fn update_submodules(&self) {
+        if !self.config.submodules {
+            return
+        }
+        if fs::metadata(self.src.join(".git")).is_err() {
+            return
+        }
+        let git_submodule = || {
+            let mut cmd = Command::new("git");
+            cmd.current_dir(&self.src).arg("submodule");
+            return cmd
+        };
+
+        // FIXME: this takes a seriously long time to execute on Windows and a
+        //        nontrivial amount of time on Unix, we should have a better way
+        //        of detecting whether we need to run all the submodule commands
+        //        below.
+        let out = output(git_submodule().arg("status"));
+        if !out.lines().any(|l| l.starts_with("+") || l.starts_with("-")) {
+            return
+        }
+
+        self.run(git_submodule().arg("sync"));
+        self.run(git_submodule().arg("init"));
+        self.run(git_submodule().arg("update"));
+        self.run(git_submodule().arg("update").arg("--recursive"));
+        self.run(git_submodule().arg("status").arg("--recursive"));
+        self.run(git_submodule().arg("foreach").arg("--recursive")
+                                .arg("git").arg("clean").arg("-fdx"));
+        self.run(git_submodule().arg("foreach").arg("--recursive")
+                                .arg("git").arg("checkout").arg("."));
+    }
+
+    /// Clear out `dir` if `input` is newer.
+    ///
+    /// After this executes, it will also ensure that `dir` exists.
+    fn clear_if_dirty(&self, dir: &Path, input: &Path) {
+        let stamp = dir.join(".stamp");
+        if mtime(&stamp) < mtime(input) {
+            self.verbose(&format!("Dirty - {}", dir.display()));
+            let _ = fs::remove_dir_all(dir);
+        }
+        t!(fs::create_dir_all(dir));
+        t!(File::create(stamp));
+    }
+
+    /// Prepares an invocation of `cargo` to be run.
+    ///
+    /// This will create a `Command` that represents a pending execution of
+    /// Cargo. This cargo will be configured to use `compiler` as the actual
+    /// rustc compiler, its output will be scoped by `mode`'s output directory,
+    /// it will pass the `--target` flag for the specified `target`, and will be
+    /// executing the Cargo command `cmd`.
+    fn cargo(&self,
+             compiler: &Compiler,
+             mode: Mode,
+             target: &str,
+             cmd: &str) -> Command {
+        let mut cargo = Command::new(&self.cargo);
+        let out_dir = self.stage_out(compiler, mode);
+        cargo.env("CARGO_TARGET_DIR", out_dir)
+             .arg(cmd)
+             .arg("-j").arg(self.jobs().to_string())
+             .arg("--target").arg(target);
+
+        let stage;
+        if compiler.stage == 0 && self.config.local_rebuild {
+            // Assume the local-rebuild rustc already has stage1 features.
+            stage = 1;
+        } else {
+            stage = compiler.stage;
+        }
+
+        // Customize the compiler we're running. Specify the compiler to cargo
+        // as our shim and then pass it some various options used to configure
+        // how the actual compiler itself is called.
+        //
+        // These variables are primarily all read by
+        // src/bootstrap/{rustc,rustdoc.rs}
+        cargo.env("RUSTC", self.out.join("bootstrap/debug/rustc"))
+             .env("RUSTC_REAL", self.compiler_path(compiler))
+             .env("RUSTC_STAGE", stage.to_string())
+             .env("RUSTC_DEBUGINFO", self.config.rust_debuginfo.to_string())
+             .env("RUSTC_CODEGEN_UNITS",
+                  self.config.rust_codegen_units.to_string())
+             .env("RUSTC_DEBUG_ASSERTIONS",
+                  self.config.rust_debug_assertions.to_string())
+             .env("RUSTC_SNAPSHOT", &self.rustc)
+             .env("RUSTC_SYSROOT", self.sysroot(compiler))
+             .env("RUSTC_LIBDIR", self.rustc_libdir(compiler))
+             .env("RUSTC_SNAPSHOT_LIBDIR", self.rustc_snapshot_libdir())
+             .env("RUSTC_RPATH", self.config.rust_rpath.to_string())
+             .env("RUSTDOC", self.out.join("bootstrap/debug/rustdoc"))
+             .env("RUSTDOC_REAL", self.rustdoc(compiler))
+             .env("RUSTC_FLAGS", self.rustc_flags(target).join(" "));
+
+        self.add_bootstrap_key(compiler, &mut cargo);
+
+        // Specify some various options for build scripts used throughout
+        // the build.
+        //
+        // FIXME: the guard against msvc shouldn't need to be here
+        if !target.contains("msvc") {
+            cargo.env(format!("CC_{}", target), self.cc(target))
+                 .env(format!("AR_{}", target), self.ar(target).unwrap()) // only msvc is None
+                 .env(format!("CFLAGS_{}", target), self.cflags(target).join(" "));
+        }
+
+        // If we're building for OSX, inform the compiler and the linker that
+        // we want to build a compiler runnable on 10.7
+        if target.contains("apple-darwin") {
+            cargo.env("MACOSX_DEPLOYMENT_TARGET", "10.7");
+        }
+
+        // Environment variables *required* throughout the build
+        //
+        // FIXME: should update code to not require this env var
+        cargo.env("CFG_COMPILER_HOST_TRIPLE", target);
+
+        if self.config.verbose || self.flags.verbose {
+            cargo.arg("-v");
+        }
+        if self.config.rust_optimize {
+            cargo.arg("--release");
+        }
+        return cargo
+    }
+
+    /// Get a path to the compiler specified.
+    fn compiler_path(&self, compiler: &Compiler) -> PathBuf {
+        if compiler.is_snapshot(self) {
+            self.rustc.clone()
+        } else {
+            self.sysroot(compiler).join("bin").join(exe("rustc", compiler.host))
+        }
+    }
+
+    /// Get the specified tool built by the specified compiler
+    fn tool(&self, compiler: &Compiler, tool: &str) -> PathBuf {
+        self.cargo_out(compiler, Mode::Tool, compiler.host)
+            .join(exe(tool, compiler.host))
+    }
+
+    /// Get the `rustdoc` executable next to the specified compiler
+    fn rustdoc(&self, compiler: &Compiler) -> PathBuf {
+        let mut rustdoc = self.compiler_path(compiler);
+        rustdoc.pop();
+        rustdoc.push(exe("rustdoc", compiler.host));
+        return rustdoc
+    }
+
+    /// Get a `Command` which is ready to run `tool` in `stage` built for
+    /// `host`.
+    fn tool_cmd(&self, compiler: &Compiler, tool: &str) -> Command {
+        let mut cmd = Command::new(self.tool(&compiler, tool));
+        let host = compiler.host;
+        let paths = vec![
+            self.cargo_out(compiler, Mode::Libstd, host).join("deps"),
+            self.cargo_out(compiler, Mode::Libtest, host).join("deps"),
+            self.cargo_out(compiler, Mode::Librustc, host).join("deps"),
+            self.cargo_out(compiler, Mode::Tool, host).join("deps"),
+        ];
+        add_lib_path(paths, &mut cmd);
+        return cmd
+    }
+
+    /// Get the space-separated set of activated features for the standard
+    /// library.
+    fn std_features(&self) -> String {
+        let mut features = String::new();
+        if self.config.debug_jemalloc {
+            features.push_str(" debug-jemalloc");
+        }
+        if self.config.use_jemalloc {
+            features.push_str(" jemalloc");
+        }
+        return features
+    }
+
+    /// Get the space-separated set of activated features for the compiler.
+    fn rustc_features(&self) -> String {
+        let mut features = String::new();
+        if self.config.use_jemalloc {
+            features.push_str(" jemalloc");
+        }
+        return features
+    }
+
+    /// Component directory that Cargo will produce output into (e.g.
+    /// release/debug)
+    fn cargo_dir(&self) -> &'static str {
+        if self.config.rust_optimize {"release"} else {"debug"}
+    }
+
+    /// Returns the sysroot for the `compiler` specified that *this build system
+    /// generates*.
+    ///
+    /// That is, the sysroot for the stage0 compiler is not what the compiler
+    /// thinks it is by default, but it's the same as the default for stages
+    /// 1-3.
+    fn sysroot(&self, compiler: &Compiler) -> PathBuf {
+        if compiler.stage == 0 {
+            self.out.join(compiler.host).join("stage0-sysroot")
+        } else {
+            self.out.join(compiler.host).join(format!("stage{}", compiler.stage))
+        }
+    }
+
+    /// Returns the libdir where the standard library and other artifacts are
+    /// found for a compiler's sysroot.
+    fn sysroot_libdir(&self, compiler: &Compiler, target: &str) -> PathBuf {
+        self.sysroot(compiler).join("lib").join("rustlib")
+            .join(target).join("lib")
+    }
+
+    /// Returns the root directory for all output generated in a particular
+    /// stage when running with a particular host compiler.
+    ///
+    /// The mode indicates what the root directory is for.
+    fn stage_out(&self, compiler: &Compiler, mode: Mode) -> PathBuf {
+        let suffix = match mode {
+            Mode::Libstd => "-std",
+            Mode::Libtest => "-test",
+            Mode::Tool => "-tools",
+            Mode::Librustc => "-rustc",
+        };
+        self.out.join(compiler.host)
+                .join(format!("stage{}{}", compiler.stage, suffix))
+    }
+
+    /// Returns the root output directory for all Cargo output in a given stage,
+    /// running a particular compiler, whether or not we're building the
+    /// standard library, and targeting the specified architecture.
+    fn cargo_out(&self,
+                 compiler: &Compiler,
+                 mode: Mode,
+                 target: &str) -> PathBuf {
+        self.stage_out(compiler, mode).join(target).join(self.cargo_dir())
+    }
+
+    /// Root output directory for LLVM compiled for `target`
+    ///
+    /// Note that if LLVM is configured externally then the directory returned
+    /// will likely be empty.
+    fn llvm_out(&self, target: &str) -> PathBuf {
+        self.out.join(target).join("llvm")
+    }
+
+    /// Returns the path to `llvm-config` for the specified target.
+    ///
+    /// If a custom `llvm-config` was specified for target then that's returned
+    /// instead.
+    fn llvm_config(&self, target: &str) -> PathBuf {
+        let target_config = self.config.target_config.get(target);
+        if let Some(s) = target_config.and_then(|c| c.llvm_config.as_ref()) {
+            s.clone()
+        } else {
+            self.llvm_out(&self.config.build).join("bin")
+                .join(exe("llvm-config", target))
+        }
+    }
+
+    /// Returns the path to `FileCheck` binary for the specified target
+    fn llvm_filecheck(&self, target: &str) -> PathBuf {
+        let target_config = self.config.target_config.get(target);
+        if let Some(s) = target_config.and_then(|c| c.llvm_config.as_ref()) {
+            s.parent().unwrap().join(exe("FileCheck", target))
+        } else {
+            let base = self.llvm_out(&self.config.build).join("build");
+            let exe = exe("FileCheck", target);
+            if self.config.build.contains("msvc") {
+                base.join("Release/bin").join(exe)
+            } else {
+                base.join("bin").join(exe)
+            }
+        }
+    }
+
+    /// Root output directory for compiler-rt compiled for `target`
+    fn compiler_rt_out(&self, target: &str) -> PathBuf {
+        self.out.join(target).join("compiler-rt")
+    }
+
+    /// Root output directory for rust_test_helpers library compiled for
+    /// `target`
+    fn test_helpers_out(&self, target: &str) -> PathBuf {
+        self.out.join(target).join("rust-test-helpers")
+    }
+
+    /// Adds the compiler's directory of dynamic libraries to `cmd`'s dynamic
+    /// library lookup path.
+    fn add_rustc_lib_path(&self, compiler: &Compiler, cmd: &mut Command) {
+        // Windows doesn't need dylib path munging because the dlls for the
+        // compiler live next to the compiler and the system will find them
+        // automatically.
+        if cfg!(windows) {
+            return
+        }
+
+        add_lib_path(vec![self.rustc_libdir(compiler)], cmd);
+    }
+
+    /// Adds the compiler's bootstrap key to the environment of `cmd`.
+    fn add_bootstrap_key(&self, compiler: &Compiler, cmd: &mut Command) {
+        // In stage0 we're using a previously released stable compiler, so we
+        // use the stage0 bootstrap key. Otherwise we use our own build's
+        // bootstrap key.
+        let bootstrap_key = if compiler.is_snapshot(self) && !self.config.local_rebuild {
+            &self.bootstrap_key_stage0
+        } else {
+            &self.bootstrap_key
+        };
+        cmd.env("RUSTC_BOOTSTRAP_KEY", bootstrap_key);
+    }
+
+    /// Returns the compiler's libdir where it stores the dynamic libraries that
+    /// it itself links against.
+    ///
+    /// For example this returns `<sysroot>/lib` on Unix and `<sysroot>/bin` on
+    /// Windows.
+    fn rustc_libdir(&self, compiler: &Compiler) -> PathBuf {
+        if compiler.is_snapshot(self) {
+            self.rustc_snapshot_libdir()
+        } else {
+            self.sysroot(compiler).join(libdir(compiler.host))
+        }
+    }
+
+    /// Returns the libdir of the snapshot compiler.
+    fn rustc_snapshot_libdir(&self) -> PathBuf {
+        self.rustc.parent().unwrap().parent().unwrap()
+            .join(libdir(&self.config.build))
+    }
+
+    /// Runs a command, printing out nice contextual information if it fails.
+    fn run(&self, cmd: &mut Command) {
+        self.verbose(&format!("running: {:?}", cmd));
+        run_silent(cmd)
+    }
+
+    /// Prints a message if this build is configured in verbose mode.
+    fn verbose(&self, msg: &str) {
+        if self.flags.verbose || self.config.verbose {
+            println!("{}", msg);
+        }
+    }
+
+    /// Returns the number of parallel jobs that have been configured for this
+    /// build.
+    fn jobs(&self) -> u32 {
+        self.flags.jobs.unwrap_or(num_cpus::get() as u32)
+    }
+
+    /// Returns the path to the C compiler for the target specified.
+    fn cc(&self, target: &str) -> &Path {
+        self.cc[target].0.path()
+    }
+
+    /// Returns a list of flags to pass to the C compiler for the target
+    /// specified.
+    fn cflags(&self, target: &str) -> Vec<String> {
+        // Filter out -O and /O (the optimization flags) that we picked up from
+        // gcc-rs because the build scripts will determine that for themselves.
+        let mut base = self.cc[target].0.args().iter()
+                           .map(|s| s.to_string_lossy().into_owned())
+                           .filter(|s| !s.starts_with("-O") && !s.starts_with("/O"))
+                           .collect::<Vec<_>>();
+
+        // If we're compiling on OSX then we add a few unconditional flags
+        // indicating that we want libc++ (more filled out than libstdc++) and
+        // we want to compile for 10.7. This way we can ensure that
+        // LLVM/jemalloc/etc are all properly compiled.
+        if target.contains("apple-darwin") {
+            base.push("-stdlib=libc++".into());
+            base.push("-mmacosx-version-min=10.7".into());
+        }
+        return base
+    }
+
+    /// Returns the path to the `ar` archive utility for the target specified.
+    fn ar(&self, target: &str) -> Option<&Path> {
+        self.cc[target].1.as_ref().map(|p| &**p)
+    }
+
+    /// Returns the path to the C++ compiler for the target specified, may panic
+    /// if no C++ compiler was configured for the target.
+    fn cxx(&self, target: &str) -> &Path {
+        self.cxx[target].path()
+    }
+
+    /// Returns flags to pass to the compiler to generate code for `target`.
+    fn rustc_flags(&self, target: &str) -> Vec<String> {
+        // New flags should be added here with great caution!
+        //
+        // It's quite unfortunate to **require** flags to generate code for a
+        // target, so it should only be passed here if absolutely necessary!
+        // Most default configuration should be done through target specs rather
+        // than an entry here.
+
+        let mut base = Vec::new();
+        if target != self.config.build && !target.contains("msvc") {
+            base.push(format!("-Clinker={}", self.cc(target).display()));
+        }
+        return base
     }
 }
 
-/// Parses the `dylib_path_var()` environment variable, returning a list of
-/// paths that are members of this lookup path.
-pub fn dylib_path() -> Vec<PathBuf> {
-    env::split_paths(&env::var_os(dylib_path_var()).unwrap_or(OsString::new()))
-        .collect()
+impl<'a> Compiler<'a> {
+    /// Creates a new compiler for the specified stage/host
+    fn new(stage: u32, host: &'a str) -> Compiler<'a> {
+        Compiler { stage: stage, host: host }
+    }
+
+    /// Returns whether this is a snapshot compiler for `build`'s configuration
+    fn is_snapshot(&self, build: &Build) -> bool {
+        self.stage == 0 && self.host == build.config.build
+    }
 }
diff --git a/src/bootstrap/main.rs b/src/bootstrap/main.rs
deleted file mode 100644 (file)
index 18d03b5..0000000
+++ /dev/null
@@ -1,49 +0,0 @@
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! rustbuild, the Rust build system
-//!
-//! This is the entry point for the build system used to compile the `rustc`
-//! compiler. Lots of documentation can be found in the `README.md` file next to
-//! this file, and otherwise documentation can be found throughout the `build`
-//! directory in each respective module.
-
-#![deny(warnings)]
-
-extern crate bootstrap;
-extern crate build_helper;
-extern crate cmake;
-extern crate filetime;
-extern crate gcc;
-extern crate getopts;
-extern crate libc;
-extern crate num_cpus;
-extern crate rustc_serialize;
-extern crate toml;
-extern crate md5;
-
-use std::env;
-
-use build::{Flags, Config, Build};
-
-mod build;
-
-fn main() {
-    let args = env::args().skip(1).collect::<Vec<_>>();
-    let flags = Flags::parse(&args);
-    let mut config = Config::parse(&flags.build, flags.config.clone());
-
-    // compat with `./configure` while we're still using that
-    if std::fs::metadata("config.mk").is_ok() {
-        config.update_with_config_mk();
-    }
-
-    Build::new(flags, config).build();
-}
diff --git a/src/bootstrap/native.rs b/src/bootstrap/native.rs
new file mode 100644 (file)
index 0000000..83e9393
--- /dev/null
@@ -0,0 +1,238 @@
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Compilation of native dependencies like LLVM.
+//!
+//! Native projects like LLVM unfortunately aren't suited just yet for
+//! compilation in build scripts that Cargo has. This is because the
+//! compilation takes a *very* long time but also because we don't want to
+//! compile LLVM 3 times as part of a normal bootstrap (we want it cached).
+//!
+//! LLVM and compiler-rt are essentially just wired up to everything else to
+//! ensure that they're always in place if needed.
+
+use std::path::Path;
+use std::process::Command;
+use std::fs::{self, File};
+
+use build_helper::output;
+use cmake;
+use gcc;
+
+use Build;
+use util::{staticlib, up_to_date};
+
+/// Compile LLVM for `target`.
+pub fn llvm(build: &Build, target: &str) {
+    // If we're using a custom LLVM bail out here, but we can only use a
+    // custom LLVM for the build triple.
+    if let Some(config) = build.config.target_config.get(target) {
+        if let Some(ref s) = config.llvm_config {
+            return check_llvm_version(build, s);
+        }
+    }
+
+    // If the cleaning trigger is newer than our built artifacts (or if the
+    // artifacts are missing) then we keep going, otherwise we bail out.
+    let dst = build.llvm_out(target);
+    let stamp = build.src.join("src/rustllvm/llvm-auto-clean-trigger");
+    let done_stamp = dst.join("llvm-finished-building");
+    build.clear_if_dirty(&dst, &stamp);
+    if fs::metadata(&done_stamp).is_ok() {
+        return
+    }
+
+    println!("Building LLVM for {}", target);
+
+    let _ = fs::remove_dir_all(&dst.join("build"));
+    t!(fs::create_dir_all(&dst.join("build")));
+    let assertions = if build.config.llvm_assertions {"ON"} else {"OFF"};
+
+    // http://llvm.org/docs/CMake.html
+    let mut cfg = cmake::Config::new(build.src.join("src/llvm"));
+    if build.config.ninja {
+        cfg.generator("Ninja");
+    }
+    cfg.target(target)
+       .host(&build.config.build)
+       .out_dir(&dst)
+       .profile(if build.config.llvm_optimize {"Release"} else {"Debug"})
+       .define("LLVM_ENABLE_ASSERTIONS", assertions)
+       .define("LLVM_TARGETS_TO_BUILD", "X86;ARM;AArch64;Mips;PowerPC")
+       .define("LLVM_INCLUDE_EXAMPLES", "OFF")
+       .define("LLVM_INCLUDE_TESTS", "OFF")
+       .define("LLVM_INCLUDE_DOCS", "OFF")
+       .define("LLVM_ENABLE_ZLIB", "OFF")
+       .define("WITH_POLLY", "OFF")
+       .define("LLVM_ENABLE_TERMINFO", "OFF")
+       .define("LLVM_ENABLE_LIBEDIT", "OFF")
+       .define("LLVM_PARALLEL_COMPILE_JOBS", build.jobs().to_string());
+
+    if target.starts_with("i686") {
+        cfg.define("LLVM_BUILD_32_BITS", "ON");
+    }
+
+    // http://llvm.org/docs/HowToCrossCompileLLVM.html
+    if target != build.config.build {
+        // FIXME: if the llvm root for the build triple is overridden then we
+        //        should use llvm-tblgen from there, also should verify that it
+        //        actually exists most of the time in normal installs of LLVM.
+        let host = build.llvm_out(&build.config.build).join("bin/llvm-tblgen");
+        cfg.define("CMAKE_CROSSCOMPILING", "True")
+           .define("LLVM_TARGET_ARCH", target.split('-').next().unwrap())
+           .define("LLVM_TABLEGEN", &host)
+           .define("LLVM_DEFAULT_TARGET_TRIPLE", target);
+    }
+
+    // MSVC handles compiler business itself
+    if !target.contains("msvc") {
+        if build.config.ccache {
+           cfg.define("CMAKE_C_COMPILER", "ccache")
+              .define("CMAKE_C_COMPILER_ARG1", build.cc(target))
+              .define("CMAKE_CXX_COMPILER", "ccache")
+              .define("CMAKE_CXX_COMPILER_ARG1", build.cxx(target));
+        } else {
+           cfg.define("CMAKE_C_COMPILER", build.cc(target))
+              .define("CMAKE_CXX_COMPILER", build.cxx(target));
+        }
+        cfg.build_arg("-j").build_arg(build.jobs().to_string());
+
+        cfg.define("CMAKE_C_FLAGS", build.cflags(target).join(" "));
+        cfg.define("CMAKE_CXX_FLAGS", build.cflags(target).join(" "));
+    }
+
+    // FIXME: we don't actually need to build all LLVM tools and all LLVM
+    //        libraries here, e.g. we just want a few components and a few
+    //        tools. Figure out how to filter them down and only build the right
+    //        tools and libs on all platforms.
+    cfg.build();
+
+    t!(File::create(&done_stamp));
+}
+
+fn check_llvm_version(build: &Build, llvm_config: &Path) {
+    if !build.config.llvm_version_check {
+        return
+    }
+
+    let mut cmd = Command::new(llvm_config);
+    let version = output(cmd.arg("--version"));
+    if version.starts_with("3.5") || version.starts_with("3.6") ||
+       version.starts_with("3.7") {
+        return
+    }
+    panic!("\n\nbad LLVM version: {}, need >=3.5\n\n", version)
+}
+
+/// Compiles the `compiler-rt` library, or at least the builtins part of it.
+///
+/// This uses the CMake build system and an existing LLVM build directory to
+/// compile the project.
+pub fn compiler_rt(build: &Build, target: &str) {
+    let dst = build.compiler_rt_out(target);
+    let arch = target.split('-').next().unwrap();
+    let mode = if build.config.rust_optimize {"Release"} else {"Debug"};
+
+    let build_llvm_config = build.llvm_config(&build.config.build);
+    let mut cfg = cmake::Config::new(build.src.join("src/compiler-rt"));
+    cfg.target(target)
+       .host(&build.config.build)
+       .out_dir(&dst)
+       .profile(mode)
+       .define("LLVM_CONFIG_PATH", build_llvm_config)
+       .define("COMPILER_RT_DEFAULT_TARGET_TRIPLE", target)
+       .define("COMPILER_RT_BUILD_SANITIZERS", "OFF")
+       .define("COMPILER_RT_BUILD_EMUTLS", "OFF")
+       // inform about c/c++ compilers, the c++ compiler isn't actually used but
+       // it's needed to get the initial configure to work on all platforms.
+       .define("CMAKE_C_COMPILER", build.cc(target))
+       .define("CMAKE_CXX_COMPILER", build.cc(target));
+
+    let (dir, build_target, libname) = if target.contains("linux") ||
+                                          target.contains("freebsd") ||
+                                          target.contains("netbsd") {
+        let os_extra = if target.contains("android") && target.contains("arm") {
+            "-android"
+        } else {
+            ""
+        };
+        let builtins_arch = match arch {
+            "i586" => "i386",
+            "arm" | "armv7" if target.contains("android") => "armhf",
+            "arm" if target.contains("eabihf") => "armhf",
+            _ => arch,
+        };
+        let target = format!("clang_rt.builtins-{}", builtins_arch);
+        ("linux".to_string(),
+         target.clone(),
+         format!("{}{}", target, os_extra))
+    } else if target.contains("apple-darwin") {
+        let builtins_arch = match arch {
+            "i686" => "i386",
+            _ => arch,
+        };
+        let target = format!("clang_rt.builtins_{}_osx", builtins_arch);
+        ("builtins".to_string(), target.clone(), target)
+    } else if target.contains("apple-ios") {
+        cfg.define("COMPILER_RT_ENABLE_IOS", "ON");
+        let target = match arch {
+            "armv7s" => "hard_pic_armv7em_macho_embedded".to_string(),
+            "aarch64" => "builtins_arm64_ios".to_string(),
+            _ => format!("hard_pic_{}_macho_embedded", arch),
+        };
+        ("builtins".to_string(), target.clone(), target)
+    } else if target.contains("windows-gnu") {
+        let target = format!("clang_rt.builtins-{}", arch);
+        ("windows".to_string(), target.clone(), target)
+    } else if target.contains("windows-msvc") {
+        let builtins_arch = match arch {
+            "i586" | "i686" => "i386",
+            _ => arch,
+        };
+        (format!("windows/{}", mode),
+         "lib/builtins/builtins".to_string(),
+         format!("clang_rt.builtins-{}", builtins_arch))
+    } else {
+        panic!("can't get os from target: {}", target)
+    };
+    let output = dst.join("build/lib").join(dir)
+                    .join(staticlib(&libname, target));
+    build.compiler_rt_built.borrow_mut().insert(target.to_string(),
+                                                output.clone());
+    if fs::metadata(&output).is_ok() {
+        return
+    }
+    let _ = fs::remove_dir_all(&dst);
+    t!(fs::create_dir_all(&dst));
+    cfg.build_target(&build_target);
+    cfg.build();
+}
+
+/// Compiles the `rust_test_helpers.c` library which we used in various
+/// `run-pass` test suites for ABI testing.
+pub fn test_helpers(build: &Build, target: &str) {
+    let dst = build.test_helpers_out(target);
+    let src = build.src.join("src/rt/rust_test_helpers.c");
+    if up_to_date(&src, &dst.join("librust_test_helpers.a")) {
+        return
+    }
+
+    println!("Building test helpers");
+    t!(fs::create_dir_all(&dst));
+    let mut cfg = gcc::Config::new();
+    cfg.cargo_metadata(false)
+       .out_dir(&dst)
+       .target(target)
+       .host(&build.config.build)
+       .opt_level(0)
+       .debug(false)
+       .file(build.src.join("src/rt/rust_test_helpers.c"))
+       .compile("librust_test_helpers.a");
+}
diff --git a/src/bootstrap/rustc.rs b/src/bootstrap/rustc.rs
deleted file mode 100644 (file)
index 97deced..0000000
+++ /dev/null
@@ -1,164 +0,0 @@
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Shim which is passed to Cargo as "rustc" when running the bootstrap.
-//!
-//! This shim will take care of some various tasks that our build process
-//! requires that Cargo can't quite do through normal configuration:
-//!
-//! 1. When compiling build scripts and build dependencies, we need a guaranteed
-//!    full standard library available. The only compiler which actually has
-//!    this is the snapshot, so we detect this situation and always compile with
-//!    the snapshot compiler.
-//! 2. We pass a bunch of `--cfg` and other flags based on what we're compiling
-//!    (and this slightly differs based on a whether we're using a snapshot or
-//!    not), so we do that all here.
-//!
-//! This may one day be replaced by RUSTFLAGS, but the dynamic nature of
-//! switching compilers for the bootstrap and for build scripts will probably
-//! never get replaced.
-
-extern crate bootstrap;
-
-use std::env;
-use std::ffi::OsString;
-use std::path::PathBuf;
-use std::process::Command;
-
-fn main() {
-    let args = env::args_os().skip(1).collect::<Vec<_>>();
-    // Detect whether or not we're a build script depending on whether --target
-    // is passed (a bit janky...)
-    let target = args.windows(2).find(|w| &*w[0] == "--target")
-                                .and_then(|w| w[1].to_str());
-
-    // Build scripts always use the snapshot compiler which is guaranteed to be
-    // able to produce an executable, whereas intermediate compilers may not
-    // have the standard library built yet and may not be able to produce an
-    // executable. Otherwise we just use the standard compiler we're
-    // bootstrapping with.
-    let (rustc, libdir) = if target.is_none() {
-        ("RUSTC_SNAPSHOT", "RUSTC_SNAPSHOT_LIBDIR")
-    } else {
-        ("RUSTC_REAL", "RUSTC_LIBDIR")
-    };
-    let stage = env::var("RUSTC_STAGE").unwrap();
-
-    let rustc = env::var_os(rustc).unwrap();
-    let libdir = env::var_os(libdir).unwrap();
-    let mut dylib_path = bootstrap::dylib_path();
-    dylib_path.insert(0, PathBuf::from(libdir));
-
-    let mut cmd = Command::new(rustc);
-    cmd.args(&args)
-       .arg("--cfg").arg(format!("stage{}", stage))
-       .env(bootstrap::dylib_path_var(), env::join_paths(&dylib_path).unwrap());
-
-    if let Some(target) = target {
-        // The stage0 compiler has a special sysroot distinct from what we
-        // actually downloaded, so we just always pass the `--sysroot` option.
-        cmd.arg("--sysroot").arg(env::var_os("RUSTC_SYSROOT").unwrap());
-
-        // When we build Rust dylibs they're all intended for intermediate
-        // usage, so make sure we pass the -Cprefer-dynamic flag instead of
-        // linking all deps statically into the dylib.
-        cmd.arg("-Cprefer-dynamic");
-
-        // Help the libc crate compile by assisting it in finding the MUSL
-        // native libraries.
-        if let Some(s) = env::var_os("MUSL_ROOT") {
-            let mut root = OsString::from("native=");
-            root.push(&s);
-            root.push("/lib");
-            cmd.arg("-L").arg(&root);
-        }
-
-        // Pass down extra flags, commonly used to configure `-Clinker` when
-        // cross compiling.
-        if let Ok(s) = env::var("RUSTC_FLAGS") {
-            cmd.args(&s.split(" ").filter(|s| !s.is_empty()).collect::<Vec<_>>());
-        }
-
-        // If we're compiling specifically the `panic_abort` crate then we pass
-        // the `-C panic=abort` option. Note that we do not do this for any
-        // other crate intentionally as this is the only crate for now that we
-        // ship with panic=abort.
-        //
-        // This... is a bit of a hack how we detect this. Ideally this
-        // information should be encoded in the crate I guess? Would likely
-        // require an RFC amendment to RFC 1513, however.
-        let is_panic_abort = args.windows(2).any(|a| {
-            &*a[0] == "--crate-name" && &*a[1] == "panic_abort"
-        });
-        // FIXME(stage0): remove this `stage != "0"` condition
-        if is_panic_abort && stage != "0" {
-            cmd.arg("-C").arg("panic=abort");
-        }
-
-        // Set various options from config.toml to configure how we're building
-        // code.
-        if env::var("RUSTC_DEBUGINFO") == Ok("true".to_string()) {
-            cmd.arg("-g");
-        }
-        let debug_assertions = match env::var("RUSTC_DEBUG_ASSERTIONS") {
-            Ok(s) => if s == "true" {"y"} else {"n"},
-            Err(..) => "n",
-        };
-        cmd.arg("-C").arg(format!("debug-assertions={}", debug_assertions));
-        if let Ok(s) = env::var("RUSTC_CODEGEN_UNITS") {
-            cmd.arg("-C").arg(format!("codegen-units={}", s));
-        }
-
-        // Dealing with rpath here is a little special, so let's go into some
-        // detail. First off, `-rpath` is a linker option on Unix platforms
-        // which adds to the runtime dynamic loader path when looking for
-        // dynamic libraries. We use this by default on Unix platforms to ensure
-        // that our nightlies behave the same on Windows, that is they work out
-        // of the box. This can be disabled, of course, but basically that's why
-        // we're gated on RUSTC_RPATH here.
-        //
-        // Ok, so the astute might be wondering "why isn't `-C rpath` used
-        // here?" and that is indeed a good question to task. This codegen
-        // option is the compiler's current interface to generating an rpath.
-        // Unfortunately it doesn't quite suffice for us. The flag currently
-        // takes no value as an argument, so the compiler calculates what it
-        // should pass to the linker as `-rpath`. This unfortunately is based on
-        // the **compile time** directory structure which when building with
-        // Cargo will be very different than the runtime directory structure.
-        //
-        // All that's a really long winded way of saying that if we use
-        // `-Crpath` then the executables generated have the wrong rpath of
-        // something like `$ORIGIN/deps` when in fact the way we distribute
-        // rustc requires the rpath to be `$ORIGIN/../lib`.
-        //
-        // So, all in all, to set up the correct rpath we pass the linker
-        // argument manually via `-C link-args=-Wl,-rpath,...`. Plus isn't it
-        // fun to pass a flag to a tool to pass a flag to pass a flag to a tool
-        // to change a flag in a binary?
-        if env::var("RUSTC_RPATH") == Ok("true".to_string()) {
-            let rpath = if target.contains("apple") {
-                Some("-Wl,-rpath,@loader_path/../lib")
-            } else if !target.contains("windows") {
-                Some("-Wl,-rpath,$ORIGIN/../lib")
-            } else {
-                None
-            };
-            if let Some(rpath) = rpath {
-                cmd.arg("-C").arg(format!("link-args={}", rpath));
-            }
-        }
-    }
-
-    // Actually run the compiler!
-    std::process::exit(match cmd.status() {
-        Ok(s) => s.code().unwrap_or(1),
-        Err(e) => panic!("\n\nfailed to run {:?}: {}\n\n", cmd, e),
-    })
-}
diff --git a/src/bootstrap/rustdoc.rs b/src/bootstrap/rustdoc.rs
deleted file mode 100644 (file)
index 88ac26d..0000000
+++ /dev/null
@@ -1,39 +0,0 @@
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Shim which is passed to Cargo as "rustdoc" when running the bootstrap.
-//!
-//! See comments in `src/bootstrap/rustc.rs` for more information.
-
-extern crate bootstrap;
-
-use std::env;
-use std::process::Command;
-use std::path::PathBuf;
-
-fn main() {
-    let args = env::args_os().skip(1).collect::<Vec<_>>();
-    let rustdoc = env::var_os("RUSTDOC_REAL").unwrap();
-    let libdir = env::var_os("RUSTC_LIBDIR").unwrap();
-
-    let mut dylib_path = bootstrap::dylib_path();
-    dylib_path.insert(0, PathBuf::from(libdir));
-
-    let mut cmd = Command::new(rustdoc);
-    cmd.args(&args)
-       .arg("--cfg").arg(format!("stage{}", env::var("RUSTC_STAGE").unwrap()))
-       .arg("--cfg").arg("dox")
-       .env(bootstrap::dylib_path_var(), env::join_paths(&dylib_path).unwrap());
-    std::process::exit(match cmd.status() {
-        Ok(s) => s.code().unwrap_or(1),
-        Err(e) => panic!("\n\nfailed to run {:?}: {}\n\n", cmd, e),
-    })
-}
-
diff --git a/src/bootstrap/sanity.rs b/src/bootstrap/sanity.rs
new file mode 100644 (file)
index 0000000..7c0f09c
--- /dev/null
@@ -0,0 +1,172 @@
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Sanity checking performed by rustbuild before actually executing anything.
+//!
+//! This module contains the implementation of ensuring that the build
+//! environment looks reasonable before progressing. This will verify that
+//! various programs like git and python exist, along with ensuring that all C
+//! compilers for cross-compiling are found.
+//!
+//! In theory if we get past this phase it's a bug if a build fails, but in
+//! practice that's likely not true!
+
+use std::collections::HashSet;
+use std::env;
+use std::ffi::{OsStr, OsString};
+use std::fs;
+use std::process::Command;
+
+use build_helper::output;
+
+use Build;
+
+pub fn check(build: &mut Build) {
+    let mut checked = HashSet::new();
+    let path = env::var_os("PATH").unwrap_or(OsString::new());
+    let mut need_cmd = |cmd: &OsStr| {
+        if !checked.insert(cmd.to_owned()) {
+            return
+        }
+        for path in env::split_paths(&path).map(|p| p.join(cmd)) {
+            if fs::metadata(&path).is_ok() ||
+               fs::metadata(path.with_extension("exe")).is_ok() {
+                return
+            }
+        }
+        panic!("\n\ncouldn't find required command: {:?}\n\n", cmd);
+    };
+
+    // If we've got a git directory we're gonna need git to update
+    // submodules and learn about various other aspects.
+    if fs::metadata(build.src.join(".git")).is_ok() {
+        need_cmd("git".as_ref());
+    }
+
+    // We need cmake, but only if we're actually building LLVM
+    for host in build.config.host.iter() {
+        if let Some(config) = build.config.target_config.get(host) {
+            if config.llvm_config.is_some() {
+                continue
+            }
+        }
+        need_cmd("cmake".as_ref());
+        if build.config.ninja {
+            need_cmd("ninja".as_ref())
+        }
+        break
+    }
+
+    need_cmd("python".as_ref());
+
+    // We're gonna build some custom C code here and there, host triples
+    // also build some C++ shims for LLVM so we need a C++ compiler.
+    for target in build.config.target.iter() {
+        need_cmd(build.cc(target).as_ref());
+        if let Some(ar) = build.ar(target) {
+            need_cmd(ar.as_ref());
+        }
+    }
+    for host in build.config.host.iter() {
+        need_cmd(build.cxx(host).as_ref());
+    }
+
+    // Externally configured LLVM requires FileCheck to exist
+    let filecheck = build.llvm_filecheck(&build.config.build);
+    if !filecheck.starts_with(&build.out) && !filecheck.exists() {
+        panic!("filecheck executable {:?} does not exist", filecheck);
+    }
+
+    for target in build.config.target.iter() {
+        // Either can't build or don't want to run jemalloc on these targets
+        if target.contains("rumprun") ||
+           target.contains("bitrig") ||
+           target.contains("openbsd") ||
+           target.contains("msvc") {
+            build.config.use_jemalloc = false;
+        }
+
+        // Can't compile for iOS unless we're on OSX
+        if target.contains("apple-ios") &&
+           !build.config.build.contains("apple-darwin") {
+            panic!("the iOS target is only supported on OSX");
+        }
+
+        // Make sure musl-root is valid if specified
+        if target.contains("musl") && (target.contains("x86_64") || target.contains("i686")) {
+            match build.config.musl_root {
+                Some(ref root) => {
+                    if fs::metadata(root.join("lib/libc.a")).is_err() {
+                        panic!("couldn't find libc.a in musl dir: {}",
+                               root.join("lib").display());
+                    }
+                    if fs::metadata(root.join("lib/libunwind.a")).is_err() {
+                        panic!("couldn't find libunwind.a in musl dir: {}",
+                               root.join("lib").display());
+                    }
+                }
+                None => {
+                    panic!("when targeting MUSL the build.musl-root option \
+                            must be specified in config.toml")
+                }
+            }
+        }
+
+        if target.contains("msvc") {
+            // There are three builds of cmake on windows: MSVC, MinGW, and
+            // Cygwin. The Cygwin build does not have generators for Visual
+            // Studio, so detect that here and error.
+            let out = output(Command::new("cmake").arg("--help"));
+            if !out.contains("Visual Studio") {
+                panic!("
+cmake does not support Visual Studio generators.
+
+This is likely due to it being an msys/cygwin build of cmake,
+rather than the required windows version, built using MinGW
+or Visual Studio.
+
+If you are building under msys2 try installing the mingw-w64-x86_64-cmake
+package instead of cmake:
+
+$ pacman -R cmake && pacman -S mingw-w64-x86_64-cmake
+");
+            }
+        }
+
+        if target.contains("arm-linux-android") {
+            need_cmd("adb".as_ref());
+        }
+    }
+
+    for host in build.flags.host.iter() {
+        if !build.config.host.contains(host) {
+            panic!("specified host `{}` is not in the ./configure list", host);
+        }
+    }
+    for target in build.flags.target.iter() {
+        if !build.config.target.contains(target) {
+            panic!("specified target `{}` is not in the ./configure list",
+                   target);
+        }
+    }
+
+    let run = |cmd: &mut Command| {
+        cmd.output().map(|output| {
+            String::from_utf8_lossy(&output.stdout)
+                   .lines().next().unwrap()
+                   .to_string()
+        })
+    };
+    build.gdb_version = run(Command::new("gdb").arg("--version")).ok();
+    build.lldb_version = run(Command::new("lldb").arg("--version")).ok();
+    if build.lldb_version.is_some() {
+        build.lldb_python_dir = run(Command::new("lldb").arg("-P")).ok();
+    }
+}
diff --git a/src/bootstrap/step.rs b/src/bootstrap/step.rs
new file mode 100644 (file)
index 0000000..4b3be04
--- /dev/null
@@ -0,0 +1,590 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Major workhorse of rustbuild, definition and dependencies between stages of
+//! the compile.
+//!
+//! The primary purpose of this module is to define the various `Step`s of
+//! execution of the build. Each `Step` has a corresponding `Source` indicating
+//! what it's actually doing along with a number of dependencies which must be
+//! executed first.
+//!
+//! This module will take the CLI as input and calculate the steps required for
+//! the build requested, ensuring that all intermediate pieces are in place.
+//! Essentially this module is a `make`-replacement, but not as good.
+
+use std::collections::HashSet;
+
+use {Build, Compiler};
+
+#[derive(Hash, Eq, PartialEq, Clone, Debug)]
+pub struct Step<'a> {
+    pub src: Source<'a>,
+    pub target: &'a str,
+}
+
+/// Macro used to iterate over all targets that are recognized by the build
+/// system.
+///
+/// Whenever a new step is added it will involve adding an entry here, updating
+/// the dependencies section below, and then adding an implementation of the
+/// step in `build/mod.rs`.
+///
+/// This macro takes another macro as an argument and then calls that macro with
+/// all steps that the build system knows about.
+macro_rules! targets {
+    ($m:ident) => {
+        $m! {
+            // Step representing building the stageN compiler. This is just the
+            // compiler executable itself, not any of the support libraries
+            (rustc, Rustc { stage: u32 }),
+
+            // Steps for the two main cargo builds. These are parameterized over
+            // the compiler which is producing the artifact.
+            (libstd, Libstd { compiler: Compiler<'a> }),
+            (libtest, Libtest { compiler: Compiler<'a> }),
+            (librustc, Librustc { compiler: Compiler<'a> }),
+
+            // Links the target produced by the compiler provided into the
+            // host's directory also provided.
+            (libstd_link, LibstdLink {
+                compiler: Compiler<'a>,
+                host: &'a str
+            }),
+            (libtest_link, LibtestLink {
+                compiler: Compiler<'a>,
+                host: &'a str
+            }),
+            (librustc_link, LibrustcLink {
+                compiler: Compiler<'a>,
+                host: &'a str
+            }),
+
+            // Various tools that we can build as part of the build.
+            (tool_linkchecker, ToolLinkchecker { stage: u32 }),
+            (tool_rustbook, ToolRustbook { stage: u32 }),
+            (tool_error_index, ToolErrorIndex { stage: u32 }),
+            (tool_cargotest, ToolCargoTest { stage: u32 }),
+            (tool_tidy, ToolTidy { stage: u32 }),
+            (tool_compiletest, ToolCompiletest { stage: u32 }),
+
+            // Steps for long-running native builds. Ideally these wouldn't
+            // actually exist and would be part of build scripts, but for now
+            // these are here.
+            //
+            // There aren't really any parameters to this, but empty structs
+            // with braces are unstable so we just pick something that works.
+            (llvm, Llvm { _dummy: () }),
+            (compiler_rt, CompilerRt { _dummy: () }),
+            (test_helpers, TestHelpers { _dummy: () }),
+            (debugger_scripts, DebuggerScripts { stage: u32 }),
+
+            // Steps for various pieces of documentation that we can generate,
+            // the 'doc' step is just a pseudo target to depend on a bunch of
+            // others.
+            (doc, Doc { stage: u32 }),
+            (doc_book, DocBook { stage: u32 }),
+            (doc_nomicon, DocNomicon { stage: u32 }),
+            (doc_style, DocStyle { stage: u32 }),
+            (doc_standalone, DocStandalone { stage: u32 }),
+            (doc_std, DocStd { stage: u32 }),
+            (doc_test, DocTest { stage: u32 }),
+            (doc_rustc, DocRustc { stage: u32 }),
+            (doc_error_index, DocErrorIndex { stage: u32 }),
+
+            // Steps for running tests. The 'check' target is just a pseudo
+            // target to depend on a bunch of others.
+            (check, Check { stage: u32, compiler: Compiler<'a> }),
+            (check_target, CheckTarget { stage: u32, compiler: Compiler<'a> }),
+            (check_linkcheck, CheckLinkcheck { stage: u32 }),
+            (check_cargotest, CheckCargoTest { stage: u32 }),
+            (check_tidy, CheckTidy { stage: u32 }),
+            (check_rpass, CheckRPass { compiler: Compiler<'a> }),
+            (check_rpass_full, CheckRPassFull { compiler: Compiler<'a> }),
+            (check_rpass_valgrind, CheckRPassValgrind { compiler: Compiler<'a> }),
+            (check_rfail, CheckRFail { compiler: Compiler<'a> }),
+            (check_rfail_full, CheckRFailFull { compiler: Compiler<'a> }),
+            (check_cfail, CheckCFail { compiler: Compiler<'a> }),
+            (check_cfail_full, CheckCFailFull { compiler: Compiler<'a> }),
+            (check_pfail, CheckPFail { compiler: Compiler<'a> }),
+            (check_pretty, CheckPretty { compiler: Compiler<'a> }),
+            (check_pretty_rpass, CheckPrettyRPass { compiler: Compiler<'a> }),
+            (check_pretty_rpass_full, CheckPrettyRPassFull { compiler: Compiler<'a> }),
+            (check_pretty_rfail, CheckPrettyRFail { compiler: Compiler<'a> }),
+            (check_pretty_rfail_full, CheckPrettyRFailFull { compiler: Compiler<'a> }),
+            (check_pretty_rpass_valgrind, CheckPrettyRPassValgrind { compiler: Compiler<'a> }),
+            (check_codegen, CheckCodegen { compiler: Compiler<'a> }),
+            (check_codegen_units, CheckCodegenUnits { compiler: Compiler<'a> }),
+            (check_incremental, CheckIncremental { compiler: Compiler<'a> }),
+            (check_ui, CheckUi { compiler: Compiler<'a> }),
+            (check_debuginfo, CheckDebuginfo { compiler: Compiler<'a> }),
+            (check_rustdoc, CheckRustdoc { compiler: Compiler<'a> }),
+            (check_docs, CheckDocs { compiler: Compiler<'a> }),
+            (check_error_index, CheckErrorIndex { compiler: Compiler<'a> }),
+            (check_rmake, CheckRMake { compiler: Compiler<'a> }),
+            (check_crate_std, CheckCrateStd { compiler: Compiler<'a> }),
+            (check_crate_test, CheckCrateTest { compiler: Compiler<'a> }),
+            (check_crate_rustc, CheckCrateRustc { compiler: Compiler<'a> }),
+
+            // Distribution targets, creating tarballs
+            (dist, Dist { stage: u32 }),
+            (dist_docs, DistDocs { stage: u32 }),
+            (dist_mingw, DistMingw { _dummy: () }),
+            (dist_rustc, DistRustc { stage: u32 }),
+            (dist_std, DistStd { compiler: Compiler<'a> }),
+
+            // Misc targets
+            (android_copy_libs, AndroidCopyLibs { compiler: Compiler<'a> }),
+        }
+    }
+}
+
+// Define the `Source` enum by iterating over all the steps and peeling out just
+// the types that we want to define.
+
+macro_rules! item { ($a:item) => ($a) }
+
+macro_rules! define_source {
+    ($(($short:ident, $name:ident { $($args:tt)* }),)*) => {
+        item! {
+            #[derive(Hash, Eq, PartialEq, Clone, Debug)]
+            pub enum Source<'a> {
+                $($name { $($args)* }),*
+            }
+        }
+    }
+}
+
+targets!(define_source);
+
+/// Calculate a list of all steps described by `build`.
+///
+/// This will inspect the flags passed in on the command line and use that to
+/// build up a list of steps to execute. These steps will then be transformed
+/// into a topologically sorted list which when executed left-to-right will
+/// correctly sequence the entire build.
+pub fn all(build: &Build) -> Vec<Step> {
+    let mut ret = Vec::new();
+    let mut all = HashSet::new();
+    for target in top_level(build) {
+        fill(build, &target, &mut ret, &mut all);
+    }
+    return ret;
+
+    fn fill<'a>(build: &'a Build,
+                target: &Step<'a>,
+                ret: &mut Vec<Step<'a>>,
+                set: &mut HashSet<Step<'a>>) {
+        if set.insert(target.clone()) {
+            for dep in target.deps(build) {
+                fill(build, &dep, ret, set);
+            }
+            ret.push(target.clone());
+        }
+    }
+}
+
+/// Determines what top-level targets are requested as part of this build,
+/// returning them as a list.
+fn top_level(build: &Build) -> Vec<Step> {
+    let mut targets = Vec::new();
+    let stage = build.flags.stage.unwrap_or(2);
+
+    let host = Step {
+        src: Source::Llvm { _dummy: () },
+        target: build.flags.host.iter().next()
+                     .unwrap_or(&build.config.build),
+    };
+    let target = Step {
+        src: Source::Llvm { _dummy: () },
+        target: build.flags.target.iter().next().map(|x| &x[..])
+                     .unwrap_or(host.target)
+    };
+
+    // First, try to find steps on the command line.
+    add_steps(build, stage, &host, &target, &mut targets);
+
+    // If none are specified, then build everything.
+    if targets.len() == 0 {
+        let t = Step {
+            src: Source::Llvm { _dummy: () },
+            target: &build.config.build,
+        };
+        if build.config.docs {
+          targets.push(t.doc(stage));
+        }
+        for host in build.config.host.iter() {
+            if !build.flags.host.contains(host) {
+                continue
+            }
+            let host = t.target(host);
+            if host.target == build.config.build {
+                targets.push(host.librustc(host.compiler(stage)));
+            } else {
+                targets.push(host.librustc_link(t.compiler(stage), host.target));
+            }
+            for target in build.config.target.iter() {
+                if !build.flags.target.contains(target) {
+                    continue
+                }
+
+                if host.target == build.config.build {
+                    targets.push(host.target(target)
+                                     .libtest(host.compiler(stage)));
+                } else {
+                    targets.push(host.target(target)
+                                     .libtest_link(t.compiler(stage), host.target));
+                }
+            }
+        }
+    }
+
+    return targets
+
+}
+
+fn add_steps<'a>(build: &'a Build,
+                 stage: u32,
+                 host: &Step<'a>,
+                 target: &Step<'a>,
+                 targets: &mut Vec<Step<'a>>) {
+    struct Context<'a> {
+        stage: u32,
+        compiler: Compiler<'a>,
+        _dummy: (),
+        host: &'a str,
+    }
+    for step in build.flags.step.iter() {
+
+        // The macro below insists on hygienic access to all local variables, so
+        // we shove them all in a struct and subvert hygiene by accessing struct
+        // fields instead.
+        let cx = Context {
+            stage: stage,
+            compiler: host.target(&build.config.build).compiler(stage),
+            _dummy: (),
+            host: host.target,
+        };
+        macro_rules! add_step {
+            ($(($short:ident, $name:ident { $($arg:ident: $t:ty),* }),)*) => ({$(
+                let name = stringify!($short).replace("_", "-");
+                if &step[..] == &name[..] {
+                    targets.push(target.$short($(cx.$arg),*));
+                    continue
+                }
+                drop(name);
+            )*})
+        }
+
+        targets!(add_step);
+
+        panic!("unknown step: {}", step);
+    }
+}
+
+macro_rules! constructors {
+    ($(($short:ident, $name:ident { $($arg:ident: $t:ty),* }),)*) => {$(
+        fn $short(&self, $($arg: $t),*) -> Step<'a> {
+            Step {
+                src: Source::$name { $($arg: $arg),* },
+                target: self.target,
+            }
+        }
+    )*}
+}
+
+impl<'a> Step<'a> {
+    fn compiler(&self, stage: u32) -> Compiler<'a> {
+        Compiler::new(stage, self.target)
+    }
+
+    fn target(&self, target: &'a str) -> Step<'a> {
+        Step { target: target, src: self.src.clone() }
+    }
+
+    // Define ergonomic constructors for each step defined above so they can be
+    // easily constructed.
+    targets!(constructors);
+
+    /// Mapping of all dependencies for rustbuild.
+    ///
+    /// This function receives a step, the build that we're building for, and
+    /// then returns a list of all the dependencies of that step.
+    pub fn deps(&self, build: &'a Build) -> Vec<Step<'a>> {
+        match self.src {
+            Source::Rustc { stage: 0 } => {
+                Vec::new()
+            }
+            Source::Rustc { stage } => {
+                let compiler = Compiler::new(stage - 1, &build.config.build);
+                vec![self.librustc(compiler)]
+            }
+            Source::Librustc { compiler } => {
+                vec![self.libtest(compiler), self.llvm(())]
+            }
+            Source::Libtest { compiler } => {
+                vec![self.libstd(compiler)]
+            }
+            Source::Libstd { compiler } => {
+                vec![self.compiler_rt(()),
+                     self.rustc(compiler.stage).target(compiler.host)]
+            }
+            Source::LibrustcLink { compiler, host } => {
+                vec![self.librustc(compiler),
+                     self.libtest_link(compiler, host)]
+            }
+            Source::LibtestLink { compiler, host } => {
+                vec![self.libtest(compiler), self.libstd_link(compiler, host)]
+            }
+            Source::LibstdLink { compiler, host } => {
+                vec![self.libstd(compiler),
+                     self.target(host).rustc(compiler.stage)]
+            }
+            Source::CompilerRt { _dummy } => {
+                vec![self.llvm(()).target(&build.config.build)]
+            }
+            Source::Llvm { _dummy } => Vec::new(),
+            Source::TestHelpers { _dummy } => Vec::new(),
+            Source::DebuggerScripts { stage: _ } => Vec::new(),
+
+            // Note that all doc targets depend on artifacts from the build
+            // architecture, not the target (which is where we're generating
+            // docs into).
+            Source::DocStd { stage } => {
+                let compiler = self.target(&build.config.build).compiler(stage);
+                vec![self.libstd(compiler)]
+            }
+            Source::DocTest { stage } => {
+                let compiler = self.target(&build.config.build).compiler(stage);
+                vec![self.libtest(compiler)]
+            }
+            Source::DocBook { stage } |
+            Source::DocNomicon { stage } |
+            Source::DocStyle { stage } => {
+                vec![self.target(&build.config.build).tool_rustbook(stage)]
+            }
+            Source::DocErrorIndex { stage } => {
+                vec![self.target(&build.config.build).tool_error_index(stage)]
+            }
+            Source::DocStandalone { stage } => {
+                vec![self.target(&build.config.build).rustc(stage)]
+            }
+            Source::DocRustc { stage } => {
+                vec![self.doc_test(stage)]
+            }
+            Source::Doc { stage } => {
+                vec![self.doc_book(stage), self.doc_nomicon(stage),
+                     self.doc_style(stage), self.doc_standalone(stage),
+                     self.doc_std(stage),
+                     self.doc_error_index(stage)]
+            }
+            Source::Check { stage, compiler } => {
+                // Check is just a pseudo step which means check all targets,
+                // so just depend on checking all targets.
+                build.config.target.iter().map(|t| {
+                    self.target(t).check_target(stage, compiler)
+                }).collect()
+            }
+            Source::CheckTarget { stage, compiler } => {
+                // CheckTarget here means run all possible test suites for this
+                // target. Most of the time, however, we can't actually run
+                // anything if we're not the build triple as we could be cross
+                // compiling.
+                //
+                // As a result, the base set of targets here is quite stripped
+                // down from the standard set of targets. These suites have
+                // their own internal logic to run in cross-compiled situations
+                // if they'll run at all. For example compiletest knows that
+                // when testing Android targets we ship artifacts to the
+                // emulator.
+                //
+                // When in doubt the rule of thumb for adding to this list is
+                // "should this test suite run on the android bot?"
+                let mut base = vec![
+                    self.check_rpass(compiler),
+                    self.check_rfail(compiler),
+                    self.check_crate_std(compiler),
+                    self.check_crate_test(compiler),
+                    self.check_debuginfo(compiler),
+                    self.dist(stage),
+                ];
+
+                // If we're testing the build triple, then we know we can
+                // actually run binaries and such, so we run all possible tests
+                // that we know about.
+                if self.target == build.config.build {
+                    base.extend(vec![
+                        // docs-related
+                        self.check_docs(compiler),
+                        self.check_error_index(compiler),
+                        self.check_rustdoc(compiler),
+
+                        // UI-related
+                        self.check_cfail(compiler),
+                        self.check_pfail(compiler),
+                        self.check_ui(compiler),
+
+                        // codegen-related
+                        self.check_incremental(compiler),
+                        self.check_codegen(compiler),
+                        self.check_codegen_units(compiler),
+
+                        // misc compiletest-test suites
+                        self.check_rpass_full(compiler),
+                        self.check_rfail_full(compiler),
+                        self.check_cfail_full(compiler),
+                        self.check_pretty_rpass_full(compiler),
+                        self.check_pretty_rfail_full(compiler),
+                        self.check_rpass_valgrind(compiler),
+                        self.check_rmake(compiler),
+
+                        // crates
+                        self.check_crate_rustc(compiler),
+
+                        // pretty
+                        self.check_pretty(compiler),
+                        self.check_pretty_rpass(compiler),
+                        self.check_pretty_rfail(compiler),
+                        self.check_pretty_rpass_valgrind(compiler),
+
+                        // misc
+                        self.check_linkcheck(stage),
+                        self.check_tidy(stage),
+                    ]);
+                }
+                return base
+            }
+            Source::CheckLinkcheck { stage } => {
+                vec![self.tool_linkchecker(stage), self.doc(stage)]
+            }
+            Source::CheckCargoTest { stage } => {
+                vec![self.tool_cargotest(stage),
+                     self.librustc(self.compiler(stage))]
+            }
+            Source::CheckTidy { stage } => {
+                vec![self.tool_tidy(stage)]
+            }
+            Source::CheckPrettyRPass { compiler } |
+            Source::CheckPrettyRFail { compiler } |
+            Source::CheckRFail { compiler } |
+            Source::CheckPFail { compiler } |
+            Source::CheckCodegen { compiler } |
+            Source::CheckCodegenUnits { compiler } |
+            Source::CheckIncremental { compiler } |
+            Source::CheckUi { compiler } |
+            Source::CheckRustdoc { compiler } |
+            Source::CheckPretty { compiler } |
+            Source::CheckCFail { compiler } |
+            Source::CheckRPassValgrind { compiler } |
+            Source::CheckRPass { compiler } => {
+                let mut base = vec![
+                    self.libtest(compiler),
+                    self.target(compiler.host).tool_compiletest(compiler.stage),
+                    self.test_helpers(()),
+                ];
+                if self.target.contains("android") {
+                    base.push(self.android_copy_libs(compiler));
+                }
+                base
+            }
+            Source::CheckDebuginfo { compiler } => {
+                vec![
+                    self.libtest(compiler),
+                    self.target(compiler.host).tool_compiletest(compiler.stage),
+                    self.test_helpers(()),
+                    self.debugger_scripts(compiler.stage),
+                ]
+            }
+            Source::CheckRPassFull { compiler } |
+            Source::CheckRFailFull { compiler } |
+            Source::CheckCFailFull { compiler } |
+            Source::CheckPrettyRPassFull { compiler } |
+            Source::CheckPrettyRFailFull { compiler } |
+            Source::CheckPrettyRPassValgrind { compiler } |
+            Source::CheckRMake { compiler } => {
+                vec![self.librustc(compiler),
+                     self.target(compiler.host).tool_compiletest(compiler.stage)]
+            }
+            Source::CheckDocs { compiler } => {
+                vec![self.libstd(compiler)]
+            }
+            Source::CheckErrorIndex { compiler } => {
+                vec![self.libstd(compiler),
+                     self.target(compiler.host).tool_error_index(compiler.stage)]
+            }
+            Source::CheckCrateStd { compiler } => {
+                vec![self.libtest(compiler)]
+            }
+            Source::CheckCrateTest { compiler } => {
+                vec![self.libtest(compiler)]
+            }
+            Source::CheckCrateRustc { compiler } => {
+                vec![self.libtest(compiler)]
+            }
+
+            Source::ToolLinkchecker { stage } |
+            Source::ToolTidy { stage } => {
+                vec![self.libstd(self.compiler(stage))]
+            }
+            Source::ToolErrorIndex { stage } |
+            Source::ToolRustbook { stage } => {
+                vec![self.librustc(self.compiler(stage))]
+            }
+            Source::ToolCargoTest { stage } => {
+                vec![self.libstd(self.compiler(stage))]
+            }
+            Source::ToolCompiletest { stage } => {
+                vec![self.libtest(self.compiler(stage))]
+            }
+
+            Source::DistDocs { stage } => vec![self.doc(stage)],
+            Source::DistMingw { _dummy: _ } => Vec::new(),
+            Source::DistRustc { stage } => {
+                vec![self.rustc(stage)]
+            }
+            Source::DistStd { compiler } => {
+                // We want to package up as many target libraries as possible
+                // for the `rust-std` package, so if this is a host target we
+                // depend on librustc and otherwise we just depend on libtest.
+                if build.config.host.iter().any(|t| t == self.target) {
+                    vec![self.librustc(compiler)]
+                } else {
+                    vec![self.libtest(compiler)]
+                }
+            }
+
+            Source::Dist { stage } => {
+                let mut base = Vec::new();
+
+                for host in build.config.host.iter() {
+                    let host = self.target(host);
+                    base.push(host.dist_rustc(stage));
+                    if host.target.contains("windows-gnu") {
+                        base.push(host.dist_mingw(()));
+                    }
+
+                    let compiler = self.compiler(stage);
+                    for target in build.config.target.iter() {
+                        let target = self.target(target);
+                        if build.config.docs {
+                            base.push(target.dist_docs(stage));
+                        }
+                        base.push(target.dist_std(compiler));
+                    }
+                }
+                return base
+            }
+
+            Source::AndroidCopyLibs { compiler } => {
+                vec![self.libtest(compiler)]
+            }
+        }
+    }
+}
diff --git a/src/bootstrap/util.rs b/src/bootstrap/util.rs
new file mode 100644 (file)
index 0000000..3ef7f8c
--- /dev/null
@@ -0,0 +1,142 @@
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Various utility functions used throughout rustbuild.
+//!
+//! Simple things like testing the various filesystem operations here and there,
+//! not a lot of interesting happenings here unfortunately.
+
+use std::env;
+use std::ffi::OsString;
+use std::fs;
+use std::path::{Path, PathBuf};
+use std::process::Command;
+
+use filetime::FileTime;
+
+/// Returns the `name` as the filename of a static library for `target`.
+pub fn staticlib(name: &str, target: &str) -> String {
+    if target.contains("windows-msvc") {
+        format!("{}.lib", name)
+    } else {
+        format!("lib{}.a", name)
+    }
+}
+
+/// Returns the last-modified time for `path`, or zero if it doesn't exist.
+pub fn mtime(path: &Path) -> FileTime {
+    fs::metadata(path).map(|f| {
+        FileTime::from_last_modification_time(&f)
+    }).unwrap_or(FileTime::zero())
+}
+
+/// Copies a file from `src` to `dst`, attempting to use hard links and then
+/// falling back to an actual filesystem copy if necessary.
+pub fn copy(src: &Path, dst: &Path) {
+    let res = fs::hard_link(src, dst);
+    let res = res.or_else(|_| fs::copy(src, dst).map(|_| ()));
+    if let Err(e) = res {
+        panic!("failed to copy `{}` to `{}`: {}", src.display(),
+               dst.display(), e)
+    }
+}
+
+/// Copies the `src` directory recursively to `dst`. Both are assumed to exist
+/// when this function is called.
+pub fn cp_r(src: &Path, dst: &Path) {
+    for f in t!(fs::read_dir(src)) {
+        let f = t!(f);
+        let path = f.path();
+        let name = path.file_name().unwrap();
+        let dst = dst.join(name);
+        if t!(f.file_type()).is_dir() {
+            let _ = fs::remove_dir_all(&dst);
+            t!(fs::create_dir(&dst));
+            cp_r(&path, &dst);
+        } else {
+            let _ = fs::remove_file(&dst);
+            copy(&path, &dst);
+        }
+    }
+}
+
+/// Given an executable called `name`, return the filename for the
+/// executable for a particular target.
+pub fn exe(name: &str, target: &str) -> String {
+    if target.contains("windows") {
+        format!("{}.exe", name)
+    } else {
+        name.to_string()
+    }
+}
+
+/// Returns whether the file name given looks like a dynamic library.
+pub fn is_dylib(name: &str) -> bool {
+    name.ends_with(".dylib") || name.ends_with(".so") || name.ends_with(".dll")
+}
+
+/// Returns the corresponding relative library directory that the compiler's
+/// dylibs will be found in.
+pub fn libdir(target: &str) -> &'static str {
+    if target.contains("windows") {"bin"} else {"lib"}
+}
+
+/// Adds a list of lookup paths to `cmd`'s dynamic library lookup path.
+pub fn add_lib_path(path: Vec<PathBuf>, cmd: &mut Command) {
+    let mut list = dylib_path();
+    for path in path {
+        list.insert(0, path);
+    }
+    cmd.env(dylib_path_var(), t!(env::join_paths(list)));
+}
+
+/// Returns whether `dst` is up to date given that the file or files in `src`
+/// are used to generate it.
+///
+/// Uses last-modified time checks to verify this.
+pub fn up_to_date(src: &Path, dst: &Path) -> bool {
+    let threshold = mtime(dst);
+    let meta = t!(fs::metadata(src));
+    if meta.is_dir() {
+        dir_up_to_date(src, &threshold)
+    } else {
+        FileTime::from_last_modification_time(&meta) <= threshold
+    }
+}
+
+fn dir_up_to_date(src: &Path, threshold: &FileTime) -> bool {
+    t!(fs::read_dir(src)).map(|e| t!(e)).all(|e| {
+        let meta = t!(e.metadata());
+        if meta.is_dir() {
+            dir_up_to_date(&e.path(), threshold)
+        } else {
+            FileTime::from_last_modification_time(&meta) < *threshold
+        }
+    })
+}
+
+/// Returns the environment variable which the dynamic library lookup path
+/// resides in for this platform.
+pub fn dylib_path_var() -> &'static str {
+    if cfg!(target_os = "windows") {
+        "PATH"
+    } else if cfg!(target_os = "macos") {
+        "DYLD_LIBRARY_PATH"
+    } else {
+        "LD_LIBRARY_PATH"
+    }
+}
+
+/// Parses the `dylib_path_var()` environment variable, returning a list of
+/// paths that are members of this lookup path.
+pub fn dylib_path() -> Vec<PathBuf> {
+    env::split_paths(&env::var_os(dylib_path_var()).unwrap_or(OsString::new()))
+        .collect()
+}
index a6b4e9492181c790fe5a3c040ca6e02397824d8f..e8c88b7db0699d1ebe03b1827c29c4607aac3dd2 100644 (file)
@@ -339,7 +339,7 @@ fn call_with_ref<'a, F>(some_closure:F) -> i32
     where F: Fn(&'a 32) -> i32 {
 ```
 
-However this presents a problem with in our case. When you specify the explict
+However this presents a problem in our case. When you specify the explicit
 lifetime on a function it binds that lifetime to the *entire* scope of the function
 instead of just the invocation scope of our closure. This means that the borrow checker
 will see a mutable reference in the same lifetime as our immutable reference and fail
@@ -354,7 +354,7 @@ fn call_with_ref<F>(some_closure:F) -> i32
 ```
 
 This lets the Rust compiler find the minimum lifetime to invoke our closure and
-satisfy the borrow checker's rules. Our function then compiles and excutes as we
+satisfy the borrow checker's rules. Our function then compiles and executes as we
 expect.
 
 ```rust
index a6ff75db89b88ddccbefe9f1b315df47d9e14240..78ab3c18e4561988429bdb55ffbec9da21eb8caf 100644 (file)
@@ -41,8 +41,9 @@ they get set in the [`[features]` section][features] of your `Cargo.toml`:
 # no features by default
 default = []
 
-# The “secure-password” feature depends on the bcrypt package.
-secure-password = ["bcrypt"]
+# Add feature "foo" here, then you can use it.
+# Our "foo" feature depends on nothing else.
+foo = []
 ```
 
 When you do this, Cargo passes along a flag to `rustc`:
index 3c6643fbfe1554e0ae02c5bc551f0a04353715c2..6292ba9aac40317c41e590bbc58d549e2f6df179 100644 (file)
@@ -486,6 +486,17 @@ you have a module in `foo.rs`, you'll often open its code and see this:
 //! The `foo` module contains a lot of useful functionality blah blah blah
 ```
 
+### Crate documentation
+
+Crates can be documented by placing an inner doc comment (`//!`) at the
+beginning of the crate root, aka `lib.rs`:
+
+```rust
+//! This is documentation for the `foo` crate.
+//!
+//! The foo crate is meant to be used for bar.
+```
+
 ### Documentation comment style
 
 Check out [RFC 505][rfc505] for full conventions around the style and format of
index f48e87c42245eca39af7b28289b20903f6c5369a..3fbcbc2f471368ca8cef380ba8a9574724a2b021 100644 (file)
@@ -521,14 +521,14 @@ against `libc` and `libm` by default.
 
 # The "nullable pointer optimization"
 
-Certain types are defined to not be `null`. This includes references (`&T`,
+Certain types are defined to not be NULL. This includes references (`&T`,
 `&mut T`), boxes (`Box<T>`), and function pointers (`extern "abi" fn()`).
-When interfacing with C, pointers that might be null are often used.
+When interfacing with C, pointers that might be NULL are often used.
 As a special case, a generic `enum` that contains exactly two variants, one of
 which contains no data and the other containing a single field, is eligible
 for the "nullable pointer optimization". When such an enum is instantiated
 with one of the non-nullable types, it is represented as a single pointer,
-and the non-data variant is represented as the null pointer. So
+and the non-data variant is represented as the NULL pointer. So
 `Option<extern "C" fn(c_int) -> c_int>` is how one represents a nullable
 function pointer using the C ABI.
 
index e7d05a8d93a561eccb11f340ced8c8caac0b43af..700ab2be589326f5b30521f66cbc94705ae5c341 100644 (file)
@@ -11,7 +11,7 @@ an Internet connection to run the commands in this section, as we’ll be
 downloading Rust from the Internet.
 
 We’ll be showing off a number of commands using a terminal, and those lines all
-start with `$`. We don't need to type in the `$`s, they are there to indicate
+start with `$`. You don't need to type in the `$`s, they are there to indicate
 the start of each command. We’ll see many tutorials and examples around the web
 that follow this convention: `$` for commands run as our regular user, and `#`
 for commands we should be running as an administrator.
@@ -159,9 +159,11 @@ You should see the version number, commit hash, and commit date.
 If you do, Rust has been installed successfully! Congrats!
 
 If you don't and you're on Windows, check that Rust is in your %PATH% system
-variable. If it isn't, run the installer again, select "Change" on the "Change,
-repair, or remove installation" page and ensure "Add to PATH" is installed on
-the local hard drive.
+variable: `$ echo %PATH%`. If it isn't, run the installer again, select "Change"
+on the "Change, repair, or remove installation" page and ensure "Add to PATH" is
+installed on the local hard drive.  If you need to configure your path manually,
+you can find the Rust executables in a directory like
+`"C:\Program Files\Rust stable GNU 1.x\bin"`.
 
 Rust does not do its own linking, and so you’ll need to have a linker
 installed. Doing so will depend on your specific system, consult its
@@ -339,7 +341,8 @@ On Windows, you'd enter:
 
 ```bash
 $ dir
-main.exe  main.rs
+main.exe
+main.rs
 ```
 
 This shows we have two files: the source code, with an `.rs` extension, and the
@@ -347,7 +350,7 @@ executable (`main.exe` on Windows, `main` everywhere else). All that's left to
 do from here is run the `main` or `main.exe` file, like this:
 
 ```bash
-$ ./main  # or main.exe on Windows
+$ ./main  # or .\main.exe on Windows
 ```
 
 If *main.rs* were your "Hello, world!" program, this would print `Hello,
index c759ff9bdbde48e845a64dfbc158f198bd8178ff..6ce75efd1031d83ce7d372081090542a6091bf9b 100644 (file)
@@ -370,7 +370,7 @@ We could also use a range of versions.
 [Cargo’s documentation][cargodoc] contains more details.
 
 [semver]: http://semver.org
-[cargodoc]: http://doc.crates.io/crates-io.html
+[cargodoc]: http://doc.crates.io/specifying-dependencies.html
 
 Now, without changing any of our code, let’s build our project:
 
index 2c2d89a1fbf9ed3b46c881a19c0108fde7fd2217..a8340d9d31e79d2fe319794ef49f90da988b7721 100644 (file)
@@ -57,7 +57,7 @@ but you must add the right number of `:` if you skip them:
 asm!("xor %eax, %eax"
     :
     :
-    : "{eax}"
+    : "eax"
    );
 # } }
 ```
@@ -68,7 +68,7 @@ Whitespace also doesn't matter:
 # #![feature(asm)]
 # #[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
 # fn main() { unsafe {
-asm!("xor %eax, %eax" ::: "{eax}");
+asm!("xor %eax, %eax" ::: "eax");
 # } }
 ```
 
@@ -127,7 +127,7 @@ stay valid.
 # #[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
 # fn main() { unsafe {
 // Put the value 0x200 in eax
-asm!("mov $$0x200, %eax" : /* no outputs */ : /* no inputs */ : "{eax}");
+asm!("mov $$0x200, %eax" : /* no outputs */ : /* no inputs */ : "eax");
 # } }
 ```
 
index e23e6f3a786a5c0e816edb6f27370f3aaf20caf7..e681d1bee06184549861f1045c1b765a1c2a0f7e 100644 (file)
@@ -105,7 +105,7 @@ When you need to keep track of how many times you already looped, you can use th
 #### On ranges:
 
 ```rust
-for (i,j) in (5..10).enumerate() {
+for (i, j) in (5..10).enumerate() {
     println!("i = {} and j = {}", i, j);
 }
 ```
index e46271511462449930ee4859ae7bbd3505d34a36..a0a49d55e105740572194aedfd870f25534b1a50 100644 (file)
@@ -62,8 +62,8 @@ Note that here, the `x` is mutable, but not the `y`.
 # Interior vs. Exterior Mutability
 
 However, when we say something is ‘immutable’ in Rust, that doesn’t mean that
-it’s not able to be changed: we mean something has ‘exterior mutability’. Consider,
-for example, [`Arc<T>`][arc]:
+it’s not able to be changed: we are referring to its ‘exterior mutability’ that
+in this case is immutable. Consider, for example, [`Arc<T>`][arc]:
 
 ```rust
 use std::sync::Arc;
index 679f5489ea8f4a331f1b25d7d5a0ba945d50052a..ae100aec3b5129c7ab43960e5458bd3dbd74b73e 100644 (file)
@@ -17,7 +17,7 @@ Here are some things to remember about raw pointers that are different than
 other pointer types. They:
 
 - are not guaranteed to point to valid memory and are not even
-  guaranteed to be non-null (unlike both `Box` and `&`);
+  guaranteed to be non-NULL (unlike both `Box` and `&`);
 - do not have any automatic clean-up, unlike `Box`, and so require
   manual resource management;
 - are plain-old-data, that is, they don't move ownership, again unlike
index 7be90e785b02b1772570a3cea24df89e83e09987..135778c38b50a2e8bc883b654a331da85389d3dc 100644 (file)
@@ -9,7 +9,7 @@ strings also work differently than in some other systems languages, such as C.
 Let’s dig into the details. A ‘string’ is a sequence of Unicode scalar values
 encoded as a stream of UTF-8 bytes. All strings are guaranteed to be a valid
 encoding of UTF-8 sequences. Additionally, unlike some systems languages,
-strings are not null-terminated and can contain null bytes.
+strings are not NUL-terminated and can contain NUL bytes.
 
 Rust has two main types of strings: `&str` and `String`. Let’s talk about
 `&str` first. These are called ‘string slices’. A string slice has a fixed
index b2fddf336273fe0d1f3d663e7bd09886579e41fb..328db25b819d89d236af7e8c2d8ea82ce63a5e1b 100644 (file)
@@ -163,11 +163,51 @@ struct Point(i32, i32, i32);
 let black = Color(0, 0, 0);
 let origin = Point(0, 0, 0);
 ```
-Here, `black` and `origin` are not equal, even though they contain the same
-values.
 
-It is almost always better to use a `struct` than a tuple struct. We
-would write `Color` and `Point` like this instead:
+Here, `black` and `origin` are not the same type, even though they contain the
+same values.
+
+The members of a tuple struct may be accessed by dot notation or destructuring
+`let`, just like regular tuples:
+
+```rust
+# struct Color(i32, i32, i32);
+# struct Point(i32, i32, i32);
+# let black = Color(0, 0, 0);
+# let origin = Point(0, 0, 0);
+let black_r = black.0;
+let Point(_, origin_y, origin_z) = origin;
+```
+
+Patterns like `Point(_, origin_y, origin_z)` are also used in
+[match expressions][match].
+
+One case when a tuple struct is very useful is when it has only one element.
+We call this the ‘newtype’ pattern, because it allows you to create a new type
+that is distinct from its contained value and also expresses its own semantic
+meaning:
+
+```rust
+struct Inches(i32);
+
+let length = Inches(10);
+
+let Inches(integer_length) = length;
+println!("length is {} inches", integer_length);
+```
+
+As above, you can extract the inner integer type through a destructuring `let`.
+In this case, the `let Inches(integer_length)` assigns `10` to `integer_length`.
+We could have used dot notation to do the same thing:
+
+```rust
+# struct Inches(i32);
+# let length = Inches(10);
+let integer_length = length.0;
+```
+
+It's always possible to use a `struct` instead of a tuple struct, and it can
+be clearer. We could write `Color` and `Point` like this instead:
 
 ```rust
 struct Color {
@@ -187,32 +227,19 @@ Good names are important, and while values in a tuple struct can be
 referenced with dot notation as well, a `struct` gives us actual names,
 rather than positions.
 
-There _is_ one case when a tuple struct is very useful, though, and that is when
-it has only one element. We call this the ‘newtype’ pattern, because
-it allows you to create a new type that is distinct from its contained value
-and also expresses its own semantic meaning:
-
-```rust
-struct Inches(i32);
-
-let length = Inches(10);
-
-let Inches(integer_length) = length;
-println!("length is {} inches", integer_length);
-```
-
-As you can see here, you can extract the inner integer type through a
-destructuring `let`, as with regular tuples. In this case, the
-`let Inches(integer_length)` assigns `10` to `integer_length`.
+[match]: match.html
 
 # Unit-like structs
 
 You can define a `struct` with no members at all:
 
 ```rust
-struct Electron;
+struct Electron {} // use empty braces...
+struct Proton;     // ...or just a semicolon
 
-let x = Electron;
+// whether you declared the struct with braces or not, do the same when creating one
+let x = Electron {};
+let y = Proton;
 ```
 
 Such a `struct` is called ‘unit-like’ because it resembles the empty
index 7954085472e503dc64eceb946a71b89a11e80ab6..86729147ed0652befde62ee0834f12ec2c095d1c 100644 (file)
@@ -431,7 +431,7 @@ one.
 
 Cargo will ignore files in subdirectories of the `tests/` directory.
 Therefore shared modules in integrations tests are possible.
-For example `tests/common/mod.rs` is not seperatly compiled by cargo but can 
+For example `tests/common/mod.rs` is not separately compiled by cargo but can
 be imported in every test with `mod common;`
 
 That's all there is to the `tests` directory. The `tests` module isn't needed
index af4e351569f7f55920fd2db72c6841682098fe1c..9cab586b82c4713d0ee73597876af5843733e945 100644 (file)
@@ -63,7 +63,7 @@ In addition, the following are all undefined behaviors in Rust, and must be
 avoided, even when writing `unsafe` code:
 
 * Data races
-* Dereferencing a null/dangling raw pointer
+* Dereferencing a NULL/dangling raw pointer
 * Reads of [undef][undef] (uninitialized) memory
 * Breaking the [pointer aliasing rules][aliasing] with raw pointers.
 * `&mut T` and `&T` follow LLVM’s scoped [noalias][noalias] model, except if
@@ -77,7 +77,7 @@ avoided, even when writing `unsafe` code:
   * Using `std::ptr::copy_nonoverlapping_memory` (`memcpy32`/`memcpy64`
     intrinsics) on overlapping buffers
 * Invalid values in primitive types, even in private fields/locals:
-  * Null/dangling references or boxes
+  * NULL/dangling references or boxes
   * A value other than `false` (0) or `true` (1) in a `bool`
   * A discriminant in an `enum` not included in its type definition
   * A value in a `char` which is a surrogate or above `char::MAX`
index 33f22e8579664815349f342aa78794639de159e1..554ab66bc563d85328bd8c8303bcf035ac8fb380 100755 (executable)
 
 import gdb
 import re
+import sys
 import debugger_pretty_printers_common as rustpp
 
+# We want a version of `range` which doesn't allocate an intermediate list,
+# specifically it should use a lazy iterator. In Python 2 this was `xrange`, but
+# if we're running with Python 3 then we need to use `range` instead.
+if sys.version_info.major >= 3:
+    xrange = range
+
 #===============================================================================
 # GDB Pretty Printing Module for Rust
 #===============================================================================
@@ -215,7 +222,7 @@ class RustSlicePrinter:
         assert data_ptr.type.get_dwarf_type_kind() == rustpp.DWARF_TYPE_CODE_PTR
         raw_ptr = data_ptr.get_wrapped_value()
 
-        for index in range(0, length):
+        for index in xrange(0, length):
             yield (str(index), (raw_ptr + index).dereference())
 
 
@@ -244,7 +251,7 @@ class RustStdVecPrinter:
     def children(self):
         (length, data_ptr, cap) = rustpp.extract_length_ptr_and_cap_from_std_vec(self.__val)
         gdb_ptr = data_ptr.get_wrapped_value()
-        for index in range(0, length):
+        for index in xrange(0, length):
             yield (str(index), (gdb_ptr + index).dereference())
 
 
index 28e3363189a082c6152c9bffb8443b44ac190102..127251cc802c9ccec20dc19c01901cbad412f43e 100644 (file)
@@ -31,8 +31,6 @@ def main(triple):
     filename = 'rustc-{}-{}.tar.gz'.format(channel, triple)
     url = 'https://static.rust-lang.org/dist/{}/{}'.format(date, filename)
     dst = dl_dir + '/' + filename
-    if os.path.exists(dst):
-        os.unlink(dst)
     bootstrap.get(url, dst)
 
     stage0_dst = triple + '/stage0'
index a873be455d5558c673c3f6cd4d0d50f89f611bbd..2beb652aa017a6d2c16e959afe05d592a203224d 100644 (file)
@@ -10,7 +10,8 @@
 
 #![allow(deprecated)]
 
-//! Thread-local reference-counted boxes (the `Rc<T>` type).
+//! Unsynchronized reference-counted boxes (the `Rc<T>` type) which are usable
+//! only within a single thread.
 //!
 //! The `Rc<T>` type provides shared ownership of an immutable value.
 //! Destruction is deterministic, and will occur as soon as the last owner is
index 94baf188bcaeea9d28f582e6071fed9eb30c8afc..edb965c1962e3bf6433b487165f8d164c39616e4 100644 (file)
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn transmute<T, U>(e: T) -> U;
 
-    /// Gives the address for the return value of the enclosing function.
-    ///
-    /// Using this intrinsic in a function that does not use an out pointer
-    /// will trigger a compiler error.
-    pub fn return_address() -> *const u8;
-
     /// Returns `true` if the actual type given as `T` requires drop
     /// glue; returns `false` if the actual type provided for `T`
     /// implements `Copy`.
index 3ebab266e2ffed5494cc069a90172e64833f9b83..dffe9dee022a68704fc565efa10467bc56c2725b 100644 (file)
@@ -1198,17 +1198,15 @@ impl<I: ExactSizeIterator> ExactSizeIterator for Peekable<I> {}
 impl<I: Iterator> Peekable<I> {
     /// Returns a reference to the next() value without advancing the iterator.
     ///
-    /// The `peek()` method will return the value that a call to [`next()`] would
-    /// return, but does not advance the iterator. Like [`next()`], if there is
-    /// a value, it's wrapped in a `Some(T)`, but if the iterator is over, it
-    /// will return `None`.
+    /// Like [`next()`], if there is a value, it is wrapped in a `Some(T)`.
+    /// But if the iteration is over, `None` is returned.
     ///
     /// [`next()`]: trait.Iterator.html#tymethod.next
     ///
-    /// Because `peek()` returns reference, and many iterators iterate over
-    /// references, this leads to a possibly confusing situation where the
+    /// Because `peek()` returns a reference, and many iterators iterate over
+    /// references, there can be a possibly confusing situation where the
     /// return value is a double reference. You can see this effect in the
-    /// examples below, with `&&i32`.
+    /// examples below.
     ///
     /// # Examples
     ///
@@ -1225,13 +1223,13 @@ impl<I: Iterator> Peekable<I> {
     ///
     /// assert_eq!(iter.next(), Some(&2));
     ///
-    /// // we can peek() multiple times, the iterator won't advance
+    /// // The iterator does not advance even if we `peek` multiple times
     /// assert_eq!(iter.peek(), Some(&&3));
     /// assert_eq!(iter.peek(), Some(&&3));
     ///
     /// assert_eq!(iter.next(), Some(&3));
     ///
-    /// // after the iterator is finished, so is peek()
+    /// // After the iterator is finished, so is `peek()`
     /// assert_eq!(iter.peek(), None);
     /// assert_eq!(iter.next(), None);
     /// ```
@@ -1263,10 +1261,10 @@ pub fn peek(&mut self) -> Option<&I::Item> {
     ///
     /// let mut iter = xs.iter().peekable();
     ///
-    /// // there are still elements to iterate over
+    /// // There are still elements to iterate over
     /// assert_eq!(iter.is_empty(), false);
     ///
-    /// // let's consume the iterator
+    /// // Let's consume the iterator
     /// iter.next();
     /// iter.next();
     /// iter.next();
index 3549bd6a3bc68f4fcfa52cab27ed36a9817b5378..9b5c2128f1eaf9fd59460d6fc432794f0a05fc26 100644 (file)
@@ -371,13 +371,16 @@ pub trait Extend<A> {
 /// Basic usage:
 ///
 /// ```
-/// let numbers = vec![1, 2, 3];
+/// let numbers = vec![1, 2, 3, 4, 5, 6];
 ///
 /// let mut iter = numbers.iter();
 ///
 /// assert_eq!(Some(&1), iter.next());
-/// assert_eq!(Some(&3), iter.next_back());
-/// assert_eq!(Some(&2), iter.next_back());
+/// assert_eq!(Some(&6), iter.next_back());
+/// assert_eq!(Some(&5), iter.next_back());
+/// assert_eq!(Some(&2), iter.next());
+/// assert_eq!(Some(&3), iter.next());
+/// assert_eq!(Some(&4), iter.next());
 /// assert_eq!(None, iter.next());
 /// assert_eq!(None, iter.next_back());
 /// ```
@@ -395,13 +398,16 @@ pub trait DoubleEndedIterator: Iterator {
     /// Basic usage:
     ///
     /// ```
-    /// let numbers = vec![1, 2, 3];
+    /// let numbers = vec![1, 2, 3, 4, 5, 6];
     ///
     /// let mut iter = numbers.iter();
     ///
     /// assert_eq!(Some(&1), iter.next());
-    /// assert_eq!(Some(&3), iter.next_back());
-    /// assert_eq!(Some(&2), iter.next_back());
+    /// assert_eq!(Some(&6), iter.next_back());
+    /// assert_eq!(Some(&5), iter.next_back());
+    /// assert_eq!(Some(&2), iter.next());
+    /// assert_eq!(Some(&3), iter.next());
+    /// assert_eq!(Some(&4), iter.next());
     /// assert_eq!(None, iter.next());
     /// assert_eq!(None, iter.next_back());
     /// ```
index 79e1462eaa135eb58013a157fb584f6823b5485f..07b05f91f489f9ecfa3752b2175e5f0bc35a3251 100644 (file)
 use num::Float;
 use num::FpCategory as Fp;
 
+/// The radix or base of the internal representation of `f32`.
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const RADIX: u32 = 2;
 
+/// Number of significant digits in base 2.
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const MANTISSA_DIGITS: u32 = 24;
+/// Approximate number of significant digits in base 10.
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const DIGITS: u32 = 6;
 
+/// Difference between `1.0` and the next largest representable number.
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const EPSILON: f32 = 1.19209290e-07_f32;
 
-/// Smallest finite f32 value
+/// Smallest finite `f32` value.
 #[stable(feature = "rust1", since = "1.0.0")]
 pub const MIN: f32 = -3.40282347e+38_f32;
-/// Smallest positive, normalized f32 value
+/// Smallest positive normal `f32` value.
 #[stable(feature = "rust1", since = "1.0.0")]
 pub const MIN_POSITIVE: f32 = 1.17549435e-38_f32;
-/// Largest finite f32 value
+/// Largest finite `f32` value.
 #[stable(feature = "rust1", since = "1.0.0")]
 pub const MAX: f32 = 3.40282347e+38_f32;
 
+/// One greater than the minimum possible normal power of 2 exponent.
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const MIN_EXP: i32 = -125;
+/// Maximum possible power of 2 exponent.
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const MAX_EXP: i32 = 128;
 
+/// Minimum possible normal power of 10 exponent.
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const MIN_10_EXP: i32 = -37;
+/// Maximum possible power of 10 exponent.
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const MAX_10_EXP: i32 = 38;
 
+/// Not a Number (NaN).
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const NAN: f32 = 0.0_f32/0.0_f32;
+/// Infinity (∞).
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const INFINITY: f32 = 1.0_f32/0.0_f32;
+/// Negative infinity (-∞).
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const NEG_INFINITY: f32 = -1.0_f32/0.0_f32;
 
 /// Basic mathematical constants.
 pub mod consts {
     // FIXME: replace with mathematical constants from cmath.
 
-    /// Archimedes' constant
+    /// Archimedes' constant (π)
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const PI: f32 = 3.14159265358979323846264338327950288_f32;
 
-    /// pi/2.0
+    /// π/2
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const FRAC_PI_2: f32 = 1.57079632679489661923132169163975144_f32;
 
-    /// pi/3.0
+    /// π/3
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const FRAC_PI_3: f32 = 1.04719755119659774615421446109316763_f32;
 
-    /// pi/4.0
+    /// π/4
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const FRAC_PI_4: f32 = 0.785398163397448309615660845819875721_f32;
 
-    /// pi/6.0
+    /// π/6
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const FRAC_PI_6: f32 = 0.52359877559829887307710723054658381_f32;
 
-    /// pi/8.0
+    /// π/8
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const FRAC_PI_8: f32 = 0.39269908169872415480783042290993786_f32;
 
-    /// 1.0/pi
+    /// 1/π
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const FRAC_1_PI: f32 = 0.318309886183790671537767526745028724_f32;
 
-    /// 2.0/pi
+    /// 2/π
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const FRAC_2_PI: f32 = 0.636619772367581343075535053490057448_f32;
 
-    /// 2.0/sqrt(pi)
+    /// 2/sqrt(π)
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const FRAC_2_SQRT_PI: f32 = 1.12837916709551257389615890312154517_f32;
 
-    /// sqrt(2.0)
+    /// sqrt(2)
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const SQRT_2: f32 = 1.41421356237309504880168872420969808_f32;
 
-    /// 1.0/sqrt(2.0)
+    /// 1/sqrt(2)
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const FRAC_1_SQRT_2: f32 = 0.707106781186547524400844362104849039_f32;
 
-    /// Euler's number
+    /// Euler's number (e)
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const E: f32 = 2.71828182845904523536028747135266250_f32;
 
-    /// log2(e)
+    /// log<sub>2</sub>(e)
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const LOG2_E: f32 = 1.44269504088896340735992468100189214_f32;
 
-    /// log10(e)
+    /// log<sub>10</sub>(e)
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const LOG10_E: f32 = 0.434294481903251827651128918916605082_f32;
 
-    /// ln(2.0)
+    /// ln(2)
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const LN_2: f32 = 0.693147180559945309417232121458176568_f32;
 
-    /// ln(10.0)
+    /// ln(10)
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const LN_10: f32 = 2.30258509299404568401799145468436421_f32;
 }
index 35557f61c45420b5ff291aa369876a63e94be7aa..82a09e599e027a49065a342fcaac64fd31da2a79 100644 (file)
 use num::FpCategory as Fp;
 use num::Float;
 
+/// The radix or base of the internal representation of `f64`.
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const RADIX: u32 = 2;
 
+/// Number of significant digits in base 2.
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const MANTISSA_DIGITS: u32 = 53;
+/// Approximate number of significant digits in base 10.
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const DIGITS: u32 = 15;
 
+/// Difference between `1.0` and the next largest representable number.
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const EPSILON: f64 = 2.2204460492503131e-16_f64;
 
-/// Smallest finite f64 value
+/// Smallest finite `f64` value.
 #[stable(feature = "rust1", since = "1.0.0")]
 pub const MIN: f64 = -1.7976931348623157e+308_f64;
-/// Smallest positive, normalized f64 value
+/// Smallest positive normal `f64` value.
 #[stable(feature = "rust1", since = "1.0.0")]
 pub const MIN_POSITIVE: f64 = 2.2250738585072014e-308_f64;
-/// Largest finite f64 value
+/// Largest finite `f64` value.
 #[stable(feature = "rust1", since = "1.0.0")]
 pub const MAX: f64 = 1.7976931348623157e+308_f64;
 
+/// One greater than the minimum possible normal power of 2 exponent.
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const MIN_EXP: i32 = -1021;
+/// Maximum possible power of 2 exponent.
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const MAX_EXP: i32 = 1024;
 
+/// Minimum possible normal power of 10 exponent.
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const MIN_10_EXP: i32 = -307;
+/// Maximum possible power of 10 exponent.
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const MAX_10_EXP: i32 = 308;
 
+/// Not a Number (NaN).
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const NAN: f64 = 0.0_f64/0.0_f64;
+/// Infinity (∞).
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const INFINITY: f64 = 1.0_f64/0.0_f64;
+/// Negative infinity (-∞).
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const NEG_INFINITY: f64 = -1.0_f64/0.0_f64;
 
 /// Basic mathematical constants.
 pub mod consts {
     // FIXME: replace with mathematical constants from cmath.
 
-    /// Archimedes' constant
+    /// Archimedes' constant (π)
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const PI: f64 = 3.14159265358979323846264338327950288_f64;
 
-    /// pi/2.0
+    /// π/2
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const FRAC_PI_2: f64 = 1.57079632679489661923132169163975144_f64;
 
-    /// pi/3.0
+    /// π/3
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const FRAC_PI_3: f64 = 1.04719755119659774615421446109316763_f64;
 
-    /// pi/4.0
+    /// π/4
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const FRAC_PI_4: f64 = 0.785398163397448309615660845819875721_f64;
 
-    /// pi/6.0
+    /// π/6
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const FRAC_PI_6: f64 = 0.52359877559829887307710723054658381_f64;
 
-    /// pi/8.0
+    /// π/8
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const FRAC_PI_8: f64 = 0.39269908169872415480783042290993786_f64;
 
-    /// 1.0/pi
+    /// 1/π
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const FRAC_1_PI: f64 = 0.318309886183790671537767526745028724_f64;
 
-    /// 2.0/pi
+    /// 2/π
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const FRAC_2_PI: f64 = 0.636619772367581343075535053490057448_f64;
 
-    /// 2.0/sqrt(pi)
+    /// 2/sqrt(π)
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const FRAC_2_SQRT_PI: f64 = 1.12837916709551257389615890312154517_f64;
 
-    /// sqrt(2.0)
+    /// sqrt(2)
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const SQRT_2: f64 = 1.41421356237309504880168872420969808_f64;
 
-    /// 1.0/sqrt(2.0)
+    /// 1/sqrt(2)
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const FRAC_1_SQRT_2: f64 = 0.707106781186547524400844362104849039_f64;
 
-    /// Euler's number
+    /// Euler's number (e)
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const E: f64 = 2.71828182845904523536028747135266250_f64;
 
-    /// log2(e)
+    /// log<sub>2</sub>(e)
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const LOG2_E: f64 = 1.44269504088896340735992468100189214_f64;
 
-    /// log10(e)
+    /// log<sub>10</sub>(e)
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const LOG10_E: f64 = 0.434294481903251827651128918916605082_f64;
 
-    /// ln(2.0)
+    /// ln(2)
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const LN_2: f64 = 0.693147180559945309417232121458176568_f64;
 
-    /// ln(10.0)
+    /// ln(10)
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const LN_10: f64 = 2.30258509299404568401799145468436421_f64;
 }
index bd6cfc427affd04a936a2a115440c5989eb7606f..e74c30d5e5af8db76279a044e1051269f2354bbf 100644 (file)
 
 macro_rules! int_module { ($T:ident, $bits:expr) => (
 
+/// The smallest value that can be represented by this integer type.
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const MIN: $T = $T::min_value();
+/// The largest value that can be represented by this integer type.
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const MAX: $T = $T::max_value();
 
 ) }
index 0d79398a8f1d51bd26e975a0a45fe16fe5b58aab..b41ef7984bbab5c3131ba0bd9a84a20a307cdfb1 100644 (file)
@@ -11,7 +11,6 @@
 //! Numeric traits and functions for the built-in numeric types.
 
 #![stable(feature = "rust1", since = "1.0.0")]
-#![allow(missing_docs)]
 
 use char::CharExt;
 use cmp::PartialOrd;
index 2ab2f9548ef1bfd5ec67ccfb79b1a43efa333329..cc9256ab6bf4ee34fd5e2126eb8f9e66e77461b5 100644 (file)
 
 macro_rules! uint_module { ($T:ident, $bits:expr) => (
 
+/// The smallest value that can be represented by this integer type.
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const MIN: $T = $T::min_value();
+/// The largest value that can be represented by this integer type.
 #[stable(feature = "rust1", since = "1.0.0")]
-#[allow(missing_docs)]
 pub const MAX: $T = $T::max_value();
 
 ) }
index a139dd152f006f5bcc51d26c0d26a1cb33ea7a21..e1e681b7aff3541e3a8fe2c2589d85aca9128ada 100644 (file)
@@ -836,7 +836,7 @@ pub enum Expr_ {
     ExprVec(HirVec<P<Expr>>),
     /// A function call
     ///
-    /// The first field resolves to the function itself,
+    /// The first field resolves to the function itself (usually an `ExprPath`),
     /// and the second field is the list of arguments
     ExprCall(P<Expr>, HirVec<P<Expr>>),
     /// A method call (`x.foo::<Bar, Baz>(a, b, c, d)`)
@@ -845,9 +845,9 @@ pub enum Expr_ {
     /// The vector of `Ty`s are the ascripted type parameters for the method
     /// (within the angle brackets).
     ///
-    /// The first element of the vector of `Expr`s is the expression that evaluates
-    /// to the object on which the method is being called on (the receiver),
-    /// and the remaining elements are the rest of the arguments.
+    /// The first element of the vector of `Expr`s is the expression that
+    /// evaluates to the object on which the method is being called on (the
+    /// receiver), and the remaining elements are the rest of the arguments.
     ///
     /// Thus, `x.foo::<Bar, Baz>(a, b, c, d)` is represented as
     /// `ExprMethodCall(foo, [Bar, Baz], [x, a, b, c, d])`.
@@ -919,13 +919,13 @@ pub enum Expr_ {
     /// Inline assembly (from `asm!`), with its outputs and inputs.
     ExprInlineAsm(InlineAsm, Vec<P<Expr>>, Vec<P<Expr>>),
 
-    /// A struct literal expression.
+    /// A struct or struct-like variant literal expression.
     ///
     /// For example, `Foo {x: 1, y: 2}`, or
     /// `Foo {x: 1, .. base}`, where `base` is the `Option<Expr>`.
     ExprStruct(Path, HirVec<Field>, Option<P<Expr>>),
 
-    /// A vector literal constructed from one repeated element.
+    /// An array literal constructed from one repeated element.
     ///
     /// For example, `[1; 5]`. The first expression is the element
     /// to be repeated; the second is the number of times to repeat it.
@@ -950,14 +950,21 @@ pub struct QSelf {
     pub position: usize,
 }
 
+/// Hints at the original code for a `match _ { .. }`
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
 pub enum MatchSource {
+    /// A `match _ { .. }`
     Normal,
+    /// An `if let _ = _ { .. }` (optionally with `else { .. }`)
     IfLetDesugar {
         contains_else_clause: bool,
     },
+    /// A `while let _ = _ { .. }` (which was desugared to a
+    /// `loop { match _ { .. } }`)
     WhileLetDesugar,
+    /// A desugared `for _ in _ { .. }` loop
     ForLoopDesugar,
+    /// A desugared `?` operator
     TryDesugar,
 }
 
@@ -975,8 +982,7 @@ pub struct MutTy {
     pub mutbl: Mutability,
 }
 
-/// Represents a method's signature in a trait declaration,
-/// or in an implementation.
+/// Represents a method's signature in a trait declaration or implementation.
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
 pub struct MethodSig {
     pub unsafety: Unsafety,
@@ -999,13 +1005,20 @@ pub struct TraitItem {
     pub span: Span,
 }
 
+/// Represents a trait method or associated constant or type
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
 pub enum TraitItem_ {
+    /// An associated constant with an optional value (otherwise `impl`s
+    /// must contain a value)
     ConstTraitItem(P<Ty>, Option<P<Expr>>),
+    /// A method with an optional body
     MethodTraitItem(MethodSig, Option<P<Block>>),
+    /// An associated type with (possibly empty) bounds and optional concrete
+    /// type
     TypeTraitItem(TyParamBounds, Option<P<Ty>>),
 }
 
+/// Represents anything within an `impl` block
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
 pub struct ImplItem {
     pub id: NodeId,
@@ -1017,10 +1030,15 @@ pub struct ImplItem {
     pub span: Span,
 }
 
+/// Represents different contents within `impl`s
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
 pub enum ImplItemKind {
+    /// An associated constant of the given type, set to the constant result
+    /// of the expression
     Const(P<Ty>, P<Expr>),
+    /// A method implementation with the given signature and body
     Method(MethodSig, P<Block>),
+    /// An associated type
     Type(P<Ty>),
 }
 
index a37990061920b6fa299a2de535010bb78a8490e7..5ccc96210be78d7d56cc8b739544ec05a1fce18e 100644 (file)
@@ -197,23 +197,70 @@ pub struct OutputFilenames {
     pub outputs: HashMap<OutputType, Option<PathBuf>>,
 }
 
+/// Codegen unit names generated by the numbered naming scheme will contain this
+/// marker right before the index of the codegen unit.
+pub const NUMBERED_CODEGEN_UNIT_MARKER: &'static str = ".cgu-";
+
 impl OutputFilenames {
     pub fn path(&self, flavor: OutputType) -> PathBuf {
         self.outputs.get(&flavor).and_then(|p| p.to_owned())
             .or_else(|| self.single_output_file.clone())
-            .unwrap_or_else(|| self.temp_path(flavor))
+            .unwrap_or_else(|| self.temp_path(flavor, None))
     }
 
-    pub fn temp_path(&self, flavor: OutputType) -> PathBuf {
+    /// Get the path where a compilation artifact of the given type for the
+    /// given codegen unit should be placed on disk. If codegen_unit_name is
+    /// None, a path distinct from those of any codegen unit will be generated.
+    pub fn temp_path(&self,
+                     flavor: OutputType,
+                     codegen_unit_name: Option<&str>)
+                     -> PathBuf {
+        let extension = match flavor {
+            OutputType::Bitcode => "bc",
+            OutputType::Assembly => "s",
+            OutputType::LlvmAssembly => "ll",
+            OutputType::Object => "o",
+            OutputType::DepInfo => "d",
+            OutputType::Exe => "",
+        };
+
+        self.temp_path_ext(extension, codegen_unit_name)
+    }
+
+    /// Like temp_path, but also supports things where there is no corresponding
+    /// OutputType, like no-opt-bitcode or lto-bitcode.
+    pub fn temp_path_ext(&self,
+                         ext: &str,
+                         codegen_unit_name: Option<&str>)
+                         -> PathBuf {
         let base = self.out_directory.join(&self.filestem());
-        match flavor {
-            OutputType::Bitcode => base.with_extension("bc"),
-            OutputType::Assembly => base.with_extension("s"),
-            OutputType::LlvmAssembly => base.with_extension("ll"),
-            OutputType::Object => base.with_extension("o"),
-            OutputType::DepInfo => base.with_extension("d"),
-            OutputType::Exe => base,
+
+        let mut extension = String::new();
+
+        if let Some(codegen_unit_name) = codegen_unit_name {
+            if codegen_unit_name.contains(NUMBERED_CODEGEN_UNIT_MARKER) {
+                // If we use the numbered naming scheme for modules, we don't want
+                // the files to look like <crate-name><extra>.<crate-name>.<index>.<ext>
+                // but simply <crate-name><extra>.<index>.<ext>
+                let marker_offset = codegen_unit_name.rfind(NUMBERED_CODEGEN_UNIT_MARKER)
+                                                     .unwrap();
+                let index_offset = marker_offset + NUMBERED_CODEGEN_UNIT_MARKER.len();
+                extension.push_str(&codegen_unit_name[index_offset .. ]);
+            } else {
+                extension.push_str(codegen_unit_name);
+            };
+        }
+
+        if !ext.is_empty() {
+            if !extension.is_empty() {
+                extension.push_str(".");
+            }
+
+            extension.push_str(ext);
         }
+
+        let path = base.with_extension(&extension[..]);
+        path
     }
 
     pub fn with_extension(&self, extension: &str) -> PathBuf {
index 277789f5312eee0dd9ce8c73e133714ec6d0832a..eef2b6e6f37b412c0df669583a8ea4bc920e88e5 100644 (file)
@@ -1081,7 +1081,7 @@ pub fn phase_5_run_llvm_passes(sess: &Session,
 
         // Remove assembly source, unless --save-temps was specified
         if !sess.opts.cg.save_temps {
-            fs::remove_file(&outputs.temp_path(OutputType::Assembly)).unwrap();
+            fs::remove_file(&outputs.temp_path(OutputType::Assembly, None)).unwrap();
         }
     } else {
         time(sess.time_passes(),
index 66b0d663424aa829a95c673de86b76e27b63ae8e..a7fb039c295f8bdbda2e9cb3356e3ab1a8232a08 100644 (file)
@@ -980,7 +980,7 @@ pub struct Resolver<'a> {
     //
     // There will be an anonymous module created around `g` with the ID of the
     // entry block for `f`.
-    module_map: NodeMap<Module<'a>>,
+    pub module_map: NodeMap<Module<'a>>,
 
     // Whether or not to print error messages. Can be set to true
     // when getting additional info for error message suggestions,
@@ -2674,6 +2674,34 @@ fn with_no_errors<T, F>(&mut self, f: F) -> T
         rs
     }
 
+    // Calls `f` with a `Resolver` whose current lexical scope is `module`'s lexical scope,
+    // i.e. the module's items and the prelude (unless the module is `#[no_implicit_prelude]`).
+    // FIXME #34673: This needs testing.
+    pub fn with_module_lexical_scope<T, F>(&mut self, module: Module<'a>, f: F) -> T
+        where F: FnOnce(&mut Resolver<'a>) -> T,
+    {
+        self.with_empty_ribs(|this| {
+            this.value_ribs.push(Rib::new(ModuleRibKind(module)));
+            this.type_ribs.push(Rib::new(ModuleRibKind(module)));
+            f(this)
+        })
+    }
+
+    fn with_empty_ribs<T, F>(&mut self, f: F) -> T
+        where F: FnOnce(&mut Resolver<'a>) -> T,
+    {
+        use ::std::mem::replace;
+        let value_ribs = replace(&mut self.value_ribs, Vec::new());
+        let type_ribs = replace(&mut self.type_ribs, Vec::new());
+        let label_ribs = replace(&mut self.label_ribs, Vec::new());
+
+        let result = f(self);
+        self.value_ribs = value_ribs;
+        self.type_ribs = type_ribs;
+        self.label_ribs = label_ribs;
+        result
+    }
+
     fn find_fallback_in_self_type(&mut self, name: Name) -> FallbackSuggestion {
         fn extract_node_id(t: &Ty) -> Option<NodeId> {
             match t.node {
@@ -2880,8 +2908,7 @@ fn resolve_expr(&mut self, expr: &Expr, parent: Option<&Expr>) {
                                 if !msg.is_empty() {
                                     msg = format!(". Did you mean {}?", msg);
                                 } else {
-                                    // we check if this a module and if so, we display a help
-                                    // message
+                                    // we display a help message if this is a module
                                     let name_path = path.segments.iter()
                                                         .map(|seg| seg.identifier.name)
                                                         .collect::<Vec<_>>();
index c1960eeee46b8fb7eccede5f0fda8f5f911e96b4..4ffb5477305493e1e366a73dd125a6ffde0b85eb 100644 (file)
@@ -29,6 +29,7 @@
 
 use rustc::hir::def::Def;
 use rustc::hir::def_id::DefId;
+use rustc::hir::map::Node;
 use rustc::session::Session;
 use rustc::ty::{self, TyCtxt, ImplOrTraitItem, ImplOrTraitItemContainer};
 
@@ -1299,7 +1300,14 @@ fn visit_expr(&mut self, ex: &ast::Expr) {
             ast::ExprKind::TupField(ref sub_ex, idx) => {
                 self.visit_expr(&sub_ex);
 
-                let hir_node = self.save_ctxt.tcx.map.expect_expr(sub_ex.id);
+                let hir_node = match self.save_ctxt.tcx.map.find(sub_ex.id) {
+                    Some(Node::NodeExpr(expr)) => expr,
+                    _ => {
+                        debug!("Missing or weird node for sub-expression {} in {:?}",
+                               sub_ex.id, ex);
+                        return;
+                    }
+                };
                 let ty = &self.tcx.expr_ty_adjusted(&hir_node).sty;
                 match *ty {
                     ty::TyStruct(def, _) => {
index 3ef6e29a6f83894da32612e3ba11b5fa4fcd780a..10af326be26a3091af33c7709bae7bd11d9c0f6f 100644 (file)
@@ -1495,20 +1495,27 @@ fn borrow(&mut self, _: ast::NodeId, _: Span, _: mc::cmt, _: ty::Region,
     fn decl_without_init(&mut self, _: ast::NodeId, _: Span) {}
 
     fn mutate(&mut self, _: ast::NodeId, _: Span, cmt: mc::cmt, _: euv::MutateMode) {
+        let cmt_id = |cmt: &mc::cmt| match cmt.cat {
+            Categorization::Upvar(mc::Upvar { id: ty::UpvarId { var_id: vid, ..}, ..}) |
+            Categorization::Local(vid) => Some(vid),
+            Categorization::Interior(ref base_cmt, mc::InteriorField(_)) => Some(base_cmt.id),
+            _ => None
+        };
         match cmt.cat {
             Categorization::Upvar(mc::Upvar { id: ty::UpvarId { var_id: vid, .. }, .. }) |
             Categorization::Local(vid) => self.reassigned |= self.node == vid,
-            Categorization::Interior(ref base_cmt, mc::InteriorField(field)) => {
-                match base_cmt.cat {
-                    Categorization::Upvar(mc::Upvar { id: ty::UpvarId { var_id: vid, .. }, .. }) |
-                    Categorization::Local(vid) => {
-                        self.reassigned |= self.node == vid &&
-                            (self.field.is_none() || Some(field) == self.field)
-                    },
-                    _ => {}
+            ref cat => {
+                let mut cat = cat;
+                while let &Categorization::Interior(ref base_cmt, mc::InteriorField(field)) = cat {
+                    if let Some(vid) = cmt_id(base_cmt) {
+                        if self.node == vid && (self.field.is_none() || self.field == Some(field)) {
+                            self.reassigned = true;
+                            return;
+                        }
+                    }
+                    cat = &base_cmt.cat;
                 }
-            },
-            _ => {}
+            }
         }
     }
 }
index df3d2d149b99ce072fbbf3ba13a1755fafa67455..6c2a09f8060c5a4b36fb87e1701d74bff6f62ed7 100644 (file)
@@ -229,6 +229,7 @@ pub fn store_fn_arg(&self, bcx: &BlockAndBuilder, idx: &mut usize, dst: ValueRef
 ///
 /// I will do my best to describe this structure, but these
 /// comments are reverse-engineered and may be inaccurate. -NDM
+#[derive(Clone)]
 pub struct FnType {
     /// The LLVM types of each argument.
     pub args: Vec<ArgType>,
index 744712b22b060ef3cafafef22dc282fb8329daa2..a9f3d2f8a175485b954c016e6d2e207a7c4084ff 100644 (file)
@@ -205,7 +205,7 @@ pub fn link_binary(sess: &Session,
 
     // Remove the temporary object file and metadata if we aren't saving temps
     if !sess.opts.cg.save_temps {
-        for obj in object_filenames(sess, outputs) {
+        for obj in object_filenames(trans, outputs) {
             remove(sess, &obj);
         }
         remove(sess, &outputs.with_extension("metadata.o"));
@@ -316,7 +316,7 @@ fn link_binary_output(sess: &Session,
                       crate_type: config::CrateType,
                       outputs: &OutputFilenames,
                       crate_name: &str) -> PathBuf {
-    let objects = object_filenames(sess, outputs);
+    let objects = object_filenames(trans, outputs);
     let default_filename = filename_for_input(sess, crate_type, crate_name,
                                               outputs);
     let out_filename = outputs.outputs.get(&OutputType::Exe)
@@ -356,10 +356,11 @@ fn link_binary_output(sess: &Session,
     out_filename
 }
 
-fn object_filenames(sess: &Session, outputs: &OutputFilenames) -> Vec<PathBuf> {
-    (0..sess.opts.cg.codegen_units).map(|i| {
-        let ext = format!("{}.o", i);
-        outputs.temp_path(OutputType::Object).with_extension(&ext)
+fn object_filenames(trans: &CrateTranslation,
+                    outputs: &OutputFilenames)
+                    -> Vec<PathBuf> {
+    trans.modules.iter().map(|module| {
+        outputs.temp_path(OutputType::Object, Some(&module.name[..]))
     }).collect()
 }
 
@@ -497,7 +498,7 @@ fn link_rlib<'a>(sess: &'a Session,
                 ab.add_file(&bc_deflated_filename);
 
                 // See the bottom of back::write::run_passes for an explanation
-                // of when we do and don't keep .0.bc files around.
+                // of when we do and don't keep .#module-name#.bc files around.
                 let user_wants_numbered_bitcode =
                         sess.opts.output_types.contains_key(&OutputType::Bitcode) &&
                         sess.opts.cg.codegen_units > 1;
index 31bc11fb215b08fa4298ee84964058d4d07cd06a..69e4a50804fadc5ca4c7a5f917ff4452cf3e9987 100644 (file)
 use flate;
 
 use std::ffi::CString;
+use std::path::Path;
 
 pub fn run(sess: &session::Session, llmod: ModuleRef,
            tm: TargetMachineRef, reachable: &[String],
            config: &ModuleConfig,
-           name_extra: &str,
-           output_names: &config::OutputFilenames) {
+           temp_no_opt_bc_filename: &Path) {
     if sess.opts.cg.prefer_dynamic {
         sess.struct_err("cannot prefer dynamic linking when performing LTO")
             .note("only 'staticlib', 'bin', and 'cdylib' outputs are \
@@ -132,8 +132,7 @@ pub fn run(sess: &session::Session, llmod: ModuleRef,
     }
 
     if sess.opts.cg.save_temps {
-        let path = output_names.with_extension(&format!("{}.no-opt.lto.bc", name_extra));
-        let cstr = path2cstr(&path);
+        let cstr = path2cstr(temp_no_opt_bc_filename);
         unsafe {
             llvm::LLVMWriteBitcodeToFile(llmod, cstr.as_ptr());
         }
index 170c8f75b5056c5ab9054bd7562623a197fabea4..ebb6e0baf20a00e8c837a18249c5e9853e706caf 100644 (file)
@@ -304,6 +304,19 @@ fn push(&mut self, text: &str) {
     }
 }
 
+pub fn exported_name_from_type_and_prefix<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
+                                                    t: ty::Ty<'tcx>,
+                                                    prefix: &str)
+                                                    -> String {
+    let empty_def_path = DefPath {
+        data: vec![],
+        krate: cstore::LOCAL_CRATE,
+    };
+    let hash = get_symbol_hash(scx, &empty_def_path, t, &[]);
+    let path = [token::intern_and_get_ident(prefix)];
+    mangle(path.iter().cloned(), Some(&hash[..]))
+}
+
 /// Only symbols that are invisible outside their compilation unit should use a
 /// name generated by this function.
 pub fn internal_name_from_type_and_suffix<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
index ec20381d1890d1f55f5d6e543104503ac4542ae5..071960f1944cfe941a62467a88f9872b7354e2f0 100644 (file)
@@ -423,9 +423,9 @@ struct HandlerFreeVars<'a> {
 unsafe fn optimize_and_codegen(cgcx: &CodegenContext,
                                mtrans: ModuleTranslation,
                                config: ModuleConfig,
-                               name_extra: String,
                                output_names: OutputFilenames) {
-    let ModuleTranslation { llmod, llcx } = mtrans;
+    let llmod = mtrans.llmod;
+    let llcx = mtrans.llcx;
     let tm = config.tm;
 
     // llcx doesn't outlive this function, so we can put this on the stack.
@@ -438,9 +438,10 @@ unsafe fn optimize_and_codegen(cgcx: &CodegenContext,
     llvm::LLVMSetInlineAsmDiagnosticHandler(llcx, inline_asm_handler, fv);
     llvm::LLVMContextSetDiagnosticHandler(llcx, diagnostic_handler, fv);
 
+    let module_name = Some(&mtrans.name[..]);
+
     if config.emit_no_opt_bc {
-        let ext = format!("{}.no-opt.bc", name_extra);
-        let out = output_names.with_extension(&ext);
+        let out = output_names.temp_path_ext("no-opt.bc", module_name);
         let out = path2cstr(&out);
         llvm::LLVMWriteBitcodeToFile(llmod, out.as_ptr());
     }
@@ -512,13 +513,18 @@ unsafe fn optimize_and_codegen(cgcx: &CodegenContext,
 
         match cgcx.lto_ctxt {
             Some((sess, reachable)) if sess.lto() =>  {
-                time(sess.time_passes(), "all lto passes", ||
-                     lto::run(sess, llmod, tm, reachable, &config,
-                              &name_extra, &output_names));
-
+                time(sess.time_passes(), "all lto passes", || {
+                    let temp_no_opt_bc_filename =
+                        output_names.temp_path_ext("no-opt.lto.bc", module_name);
+                    lto::run(sess,
+                             llmod,
+                             tm,
+                             reachable,
+                             &config,
+                             &temp_no_opt_bc_filename);
+                });
                 if config.emit_lto_bc {
-                    let name = format!("{}.lto.bc", name_extra);
-                    let out = output_names.with_extension(&name);
+                    let out = output_names.temp_path_ext("lto.bc", module_name);
                     let out = path2cstr(&out);
                     llvm::LLVMWriteBitcodeToFile(llmod, out.as_ptr());
                 }
@@ -556,8 +562,8 @@ unsafe fn with_codegen<F>(tm: TargetMachineRef,
     let write_obj = config.emit_obj && !config.obj_is_bitcode;
     let copy_bc_to_obj = config.emit_obj && config.obj_is_bitcode;
 
-    let bc_out = output_names.with_extension(&format!("{}.bc", name_extra));
-    let obj_out = output_names.with_extension(&format!("{}.o", name_extra));
+    let bc_out = output_names.temp_path(OutputType::Bitcode, module_name);
+    let obj_out = output_names.temp_path(OutputType::Object, module_name);
 
     if write_bc {
         let bc_out_c = path2cstr(&bc_out);
@@ -566,8 +572,7 @@ unsafe fn with_codegen<F>(tm: TargetMachineRef,
 
     time(config.time_passes, &format!("codegen passes [{}]", cgcx.worker), || {
         if config.emit_ir {
-            let ext = format!("{}.ll", name_extra);
-            let out = output_names.with_extension(&ext);
+            let out = output_names.temp_path(OutputType::LlvmAssembly, module_name);
             let out = path2cstr(&out);
             with_codegen(tm, llmod, config.no_builtins, |cpm| {
                 llvm::LLVMRustPrintModule(cpm, llmod, out.as_ptr());
@@ -576,7 +581,7 @@ unsafe fn with_codegen<F>(tm: TargetMachineRef,
         }
 
         if config.emit_asm {
-            let path = output_names.with_extension(&format!("{}.s", name_extra));
+            let path = output_names.temp_path(OutputType::Assembly, module_name);
 
             // We can't use the same module for asm and binary output, because that triggers
             // various errors like invalid IR or broken binaries, so we might have to clone the
@@ -713,27 +718,29 @@ pub fn run_passes(sess: &Session,
 
     {
         let work = build_work_item(sess,
-                                   trans.metadata_module,
+                                   trans.metadata_module.clone(),
                                    metadata_config.clone(),
-                                   crate_output.clone(),
-                                   "metadata".to_string());
+                                   crate_output.clone());
         work_items.push(work);
     }
 
-    for (index, mtrans) in trans.modules.iter().enumerate() {
+    for mtrans in trans.modules.iter() {
         let work = build_work_item(sess,
-                                   *mtrans,
+                                   mtrans.clone(),
                                    modules_config.clone(),
-                                   crate_output.clone(),
-                                   format!("{}", index));
+                                   crate_output.clone());
         work_items.push(work);
     }
 
     // Process the work items, optionally using worker threads.
-    if sess.opts.cg.codegen_units == 1 {
+    // NOTE: This code is not really adapted to incremental compilation where
+    //       the compiler decides the number of codegen units (and will
+    //       potentially create hundreds of them).
+    let num_workers = work_items.len() - 1;
+    if num_workers == 1 {
         run_work_singlethreaded(sess, &trans.reachable, work_items);
     } else {
-        run_work_multithreaded(sess, work_items, sess.opts.cg.codegen_units);
+        run_work_multithreaded(sess, work_items, num_workers);
     }
 
     // All codegen is finished.
@@ -748,32 +755,42 @@ pub fn run_passes(sess: &Session,
         }
     };
 
-    let copy_if_one_unit = |ext: &str,
-                            output_type: OutputType,
+    let copy_if_one_unit = |output_type: OutputType,
                             keep_numbered: bool| {
-        if sess.opts.cg.codegen_units == 1 {
+        if trans.modules.len() == 1 {
             // 1) Only one codegen unit.  In this case it's no difficulty
             //    to copy `foo.0.x` to `foo.x`.
-            copy_gracefully(&crate_output.with_extension(ext),
+            let module_name = Some(&(trans.modules[0].name)[..]);
+            let path = crate_output.temp_path(output_type, module_name);
+            copy_gracefully(&path,
                             &crate_output.path(output_type));
             if !sess.opts.cg.save_temps && !keep_numbered {
-                // The user just wants `foo.x`, not `foo.0.x`.
-                remove(sess, &crate_output.with_extension(ext));
+                // The user just wants `foo.x`, not `foo.#module-name#.x`.
+                remove(sess, &path);
             }
-        } else if crate_output.outputs.contains_key(&output_type) {
-            // 2) Multiple codegen units, with `--emit foo=some_name`.  We have
-            //    no good solution for this case, so warn the user.
-            sess.warn(&format!("ignoring emit path because multiple .{} files \
-                                were produced", ext));
-        } else if crate_output.single_output_file.is_some() {
-            // 3) Multiple codegen units, with `-o some_name`.  We have
-            //    no good solution for this case, so warn the user.
-            sess.warn(&format!("ignoring -o because multiple .{} files \
-                                were produced", ext));
         } else {
-            // 4) Multiple codegen units, but no explicit name.  We
-            //    just leave the `foo.0.x` files in place.
-            // (We don't have to do any work in this case.)
+            let ext = crate_output.temp_path(output_type, None)
+                                  .extension()
+                                  .unwrap()
+                                  .to_str()
+                                  .unwrap()
+                                  .to_owned();
+
+            if crate_output.outputs.contains_key(&output_type) {
+                // 2) Multiple codegen units, with `--emit foo=some_name`.  We have
+                //    no good solution for this case, so warn the user.
+                sess.warn(&format!("ignoring emit path because multiple .{} files \
+                                    were produced", ext));
+            } else if crate_output.single_output_file.is_some() {
+                // 3) Multiple codegen units, with `-o some_name`.  We have
+                //    no good solution for this case, so warn the user.
+                sess.warn(&format!("ignoring -o because multiple .{} files \
+                                    were produced", ext));
+            } else {
+                // 4) Multiple codegen units, but no explicit name.  We
+                //    just leave the `foo.0.x` files in place.
+                // (We don't have to do any work in this case.)
+            }
         }
     };
 
@@ -789,17 +806,17 @@ pub fn run_passes(sess: &Session,
                 // Copy to .bc, but always keep the .0.bc.  There is a later
                 // check to figure out if we should delete .0.bc files, or keep
                 // them for making an rlib.
-                copy_if_one_unit("0.bc", OutputType::Bitcode, true);
+                copy_if_one_unit(OutputType::Bitcode, true);
             }
             OutputType::LlvmAssembly => {
-                copy_if_one_unit("0.ll", OutputType::LlvmAssembly, false);
+                copy_if_one_unit(OutputType::LlvmAssembly, false);
             }
             OutputType::Assembly => {
-                copy_if_one_unit("0.s", OutputType::Assembly, false);
+                copy_if_one_unit(OutputType::Assembly, false);
             }
             OutputType::Object => {
                 user_wants_objects = true;
-                copy_if_one_unit("0.o", OutputType::Object, true);
+                copy_if_one_unit(OutputType::Object, true);
             }
             OutputType::Exe |
             OutputType::DepInfo => {}
@@ -810,51 +827,55 @@ pub fn run_passes(sess: &Session,
     // Clean up unwanted temporary files.
 
     // We create the following files by default:
-    //  - crate.0.bc
-    //  - crate.0.o
+    //  - crate.#module-name#.bc
+    //  - crate.#module-name#.o
     //  - crate.metadata.bc
     //  - crate.metadata.o
     //  - crate.o (linked from crate.##.o)
-    //  - crate.bc (copied from crate.0.bc)
+    //  - crate.bc (copied from crate.##.bc)
     // We may create additional files if requested by the user (through
     // `-C save-temps` or `--emit=` flags).
 
     if !sess.opts.cg.save_temps {
-        // Remove the temporary .0.o objects.  If the user didn't
+        // Remove the temporary .#module-name#.o objects.  If the user didn't
         // explicitly request bitcode (with --emit=bc), and the bitcode is not
-        // needed for building an rlib, then we must remove .0.bc as well.
+        // needed for building an rlib, then we must remove .#module-name#.bc as
+        // well.
 
-        // Specific rules for keeping .0.bc:
+        // Specific rules for keeping .#module-name#.bc:
         //  - If we're building an rlib (`needs_crate_bitcode`), then keep
         //    it.
         //  - If the user requested bitcode (`user_wants_bitcode`), and
         //    codegen_units > 1, then keep it.
         //  - If the user requested bitcode but codegen_units == 1, then we
-        //    can toss .0.bc because we copied it to .bc earlier.
+        //    can toss .#module-name#.bc because we copied it to .bc earlier.
         //  - If we're not building an rlib and the user didn't request
-        //    bitcode, then delete .0.bc.
+        //    bitcode, then delete .#module-name#.bc.
         // If you change how this works, also update back::link::link_rlib,
-        // where .0.bc files are (maybe) deleted after making an rlib.
+        // where .#module-name#.bc files are (maybe) deleted after making an
+        // rlib.
         let keep_numbered_bitcode = needs_crate_bitcode ||
                 (user_wants_bitcode && sess.opts.cg.codegen_units > 1);
 
         let keep_numbered_objects = needs_crate_object ||
                 (user_wants_objects && sess.opts.cg.codegen_units > 1);
 
-        for i in 0..trans.modules.len() {
+        for module_name in trans.modules.iter().map(|m| Some(&m.name[..])) {
             if modules_config.emit_obj && !keep_numbered_objects {
-                let ext = format!("{}.o", i);
-                remove(sess, &crate_output.with_extension(&ext));
+                let path = crate_output.temp_path(OutputType::Object, module_name);
+                remove(sess, &path);
             }
 
             if modules_config.emit_bc && !keep_numbered_bitcode {
-                let ext = format!("{}.bc", i);
-                remove(sess, &crate_output.with_extension(&ext));
+                let path = crate_output.temp_path(OutputType::Bitcode, module_name);
+                remove(sess, &path);
             }
         }
 
         if metadata_config.emit_bc && !user_wants_bitcode {
-            remove(sess, &crate_output.with_extension("metadata.bc"));
+            let path = crate_output.temp_path(OutputType::Bitcode,
+                                              Some(&trans.metadata_module.name[..]));
+            remove(sess, &path);
         }
     }
 
@@ -874,28 +895,31 @@ pub fn run_passes(sess: &Session,
 struct WorkItem {
     mtrans: ModuleTranslation,
     config: ModuleConfig,
-    output_names: OutputFilenames,
-    name_extra: String
+    output_names: OutputFilenames
 }
 
 fn build_work_item(sess: &Session,
                    mtrans: ModuleTranslation,
                    config: ModuleConfig,
-                   output_names: OutputFilenames,
-                   name_extra: String)
+                   output_names: OutputFilenames)
                    -> WorkItem
 {
     let mut config = config;
     config.tm = create_target_machine(sess);
-    WorkItem { mtrans: mtrans, config: config, output_names: output_names,
-               name_extra: name_extra }
+    WorkItem {
+        mtrans: mtrans,
+        config: config,
+        output_names: output_names
+    }
 }
 
 fn execute_work_item(cgcx: &CodegenContext,
                      work_item: WorkItem) {
     unsafe {
-        optimize_and_codegen(cgcx, work_item.mtrans, work_item.config,
-                             work_item.name_extra, work_item.output_names);
+        optimize_and_codegen(cgcx,
+                             work_item.mtrans,
+                             work_item.config,
+                             work_item.output_names);
     }
 }
 
@@ -914,6 +938,8 @@ fn run_work_singlethreaded(sess: &Session,
 fn run_work_multithreaded(sess: &Session,
                           work_items: Vec<WorkItem>,
                           num_workers: usize) {
+    assert!(num_workers > 0);
+
     // Run some workers to process the work items.
     let work_items_arc = Arc::new(Mutex::new(work_items));
     let mut diag_emitter = SharedEmitter::new();
@@ -981,7 +1007,7 @@ pub fn run_assembler(sess: &Session, outputs: &OutputFilenames) {
     let (pname, mut cmd, _) = get_linker(sess);
 
     cmd.arg("-c").arg("-o").arg(&outputs.path(OutputType::Object))
-                           .arg(&outputs.temp_path(OutputType::Assembly));
+                           .arg(&outputs.temp_path(OutputType::Assembly, None));
     debug!("{:?}", cmd);
 
     match cmd.output() {
index 7a572fdadc3d795bdac78a20a3bb3ea80449ed96..c080d1f06d00f2a0d9a5bdc1d2f0f817f059d546 100644 (file)
@@ -25,8 +25,6 @@
 
 #![allow(non_camel_case_types)]
 
-pub use self::ValueOrigin::*;
-
 use super::CrateTranslation;
 use super::ModuleTranslation;
 
@@ -60,7 +58,7 @@
 use cleanup::{self, CleanupMethods, DropHint};
 use closure;
 use common::{Block, C_bool, C_bytes_in_context, C_i32, C_int, C_uint, C_integral};
-use collector::{self, TransItemState, TransItemCollectionMode};
+use collector::{self, TransItemCollectionMode};
 use common::{C_null, C_struct_in_context, C_u64, C_u8, C_undef};
 use common::{CrateContext, DropFlagHintsMap, Field, FunctionContext};
 use common::{Result, NodeIdAndSpan, VariantInfo};
@@ -82,6 +80,7 @@
 use mir;
 use monomorphize::{self, Instance};
 use partitioning::{self, PartitioningStrategy, CodegenUnit};
+use symbol_map::SymbolMap;
 use symbol_names_test;
 use trans_item::TransItem;
 use tvec;
@@ -99,6 +98,7 @@
 use std::ffi::{CStr, CString};
 use std::cell::{Cell, RefCell};
 use std::collections::{HashMap, HashSet};
+use std::rc::Rc;
 use std::str;
 use std::{i8, i16, i32, i64};
 use syntax_pos::{Span, DUMMY_SP};
@@ -1407,19 +1407,17 @@ impl<'blk, 'tcx> FunctionContext<'blk, 'tcx> {
     pub fn new(ccx: &'blk CrateContext<'blk, 'tcx>,
                llfndecl: ValueRef,
                fn_ty: FnType,
-               definition: Option<(Instance<'tcx>, &ty::FnSig<'tcx>, Abi)>,
+               definition: Option<(Instance<'tcx>, &ty::FnSig<'tcx>, Abi, ast::NodeId)>,
                block_arena: &'blk TypedArena<common::BlockS<'blk, 'tcx>>)
                -> FunctionContext<'blk, 'tcx> {
-        let (param_substs, def_id) = match definition {
-            Some((instance, _, _)) => {
+        let (param_substs, def_id, inlined_id) = match definition {
+            Some((instance, _, _, inlined_id)) => {
                 common::validate_substs(instance.substs);
-                (instance.substs, Some(instance.def))
+                (instance.substs, Some(instance.def), Some(inlined_id))
             }
-            None => (ccx.tcx().mk_substs(Substs::empty()), None)
+            None => (ccx.tcx().mk_substs(Substs::empty()), None, None)
         };
 
-        let inlined_did = def_id.and_then(|def_id| inline::get_local_instance(ccx, def_id));
-        let inlined_id = inlined_did.and_then(|id| ccx.tcx().map.as_local_node_id(id));
         let local_id = def_id.and_then(|id| ccx.tcx().map.as_local_node_id(id));
 
         debug!("FunctionContext::new({})",
@@ -1454,7 +1452,7 @@ pub fn new(ccx: &'blk CrateContext<'blk, 'tcx>,
         };
 
         let debug_context = if let (false, Some(definition)) = (no_debug, definition) {
-            let (instance, sig, abi) = definition;
+            let (instance, sig, abi, _) = definition;
             debuginfo::create_function_debug_context(ccx, instance, sig, abi, llfndecl)
         } else {
             debuginfo::empty_function_debug_context(ccx)
@@ -1832,10 +1830,6 @@ pub fn trans_closure<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
                                closure_env: closure::ClosureEnv) {
     ccx.stats().n_closures.set(ccx.stats().n_closures.get() + 1);
 
-    if collector::collecting_debug_information(ccx.shared()) {
-        ccx.record_translation_item_as_generated(TransItem::Fn(instance));
-    }
-
     let _icx = push_ctxt("trans_closure");
     if !ccx.sess().no_landing_pads() {
         attributes::emit_uwtable(llfndecl, true);
@@ -1850,7 +1844,11 @@ pub fn trans_closure<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
 
     let (arena, fcx): (TypedArena<_>, FunctionContext);
     arena = TypedArena::new();
-    fcx = FunctionContext::new(ccx, llfndecl, fn_ty, Some((instance, sig, abi)), &arena);
+    fcx = FunctionContext::new(ccx,
+                               llfndecl,
+                               fn_ty,
+                               Some((instance, sig, abi, inlined_id)),
+                               &arena);
 
     if fcx.mir.is_some() {
         return mir::trans_mir(&fcx);
@@ -1916,35 +1914,47 @@ pub fn trans_closure<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
     fcx.finish(bcx, fn_cleanup_debug_loc.debug_loc());
 }
 
-/// Creates an LLVM function corresponding to a source language function.
-pub fn trans_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
-                          decl: &hir::FnDecl,
-                          body: &hir::Block,
-                          llfndecl: ValueRef,
-                          param_substs: &'tcx Substs<'tcx>,
-                          id: ast::NodeId) {
-    let _s = StatRecorder::new(ccx, ccx.tcx().node_path_str(id));
-    debug!("trans_fn(param_substs={:?})", param_substs);
-    let _icx = push_ctxt("trans_fn");
-    let def_id = if let Some(&def_id) = ccx.external_srcs().borrow().get(&id) {
-        def_id
-    } else {
-        ccx.tcx().map.local_def_id(id)
-    };
-    let fn_ty = ccx.tcx().lookup_item_type(def_id).ty;
-    let fn_ty = monomorphize::apply_param_substs(ccx.tcx(), param_substs, &fn_ty);
+pub fn trans_instance<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, instance: Instance<'tcx>) {
+    let instance = inline::maybe_inline_instance(ccx, instance);
+
+    let fn_node_id = ccx.tcx().map.as_local_node_id(instance.def).unwrap();
+
+    let _s = StatRecorder::new(ccx, ccx.tcx().node_path_str(fn_node_id));
+    debug!("trans_instance(instance={:?})", instance);
+    let _icx = push_ctxt("trans_instance");
+
+    let item = ccx.tcx().map.find(fn_node_id).unwrap();
+
+    let fn_ty = ccx.tcx().lookup_item_type(instance.def).ty;
+    let fn_ty = ccx.tcx().erase_regions(&fn_ty);
+    let fn_ty = monomorphize::apply_param_substs(ccx.tcx(), instance.substs, &fn_ty);
+
     let sig = ccx.tcx().erase_late_bound_regions(fn_ty.fn_sig());
     let sig = ccx.tcx().normalize_associated_type(&sig);
     let abi = fn_ty.fn_abi();
-    trans_closure(ccx,
-                  decl,
-                  body,
-                  llfndecl,
-                  Instance::new(def_id, param_substs),
-                  id,
-                  &sig,
-                  abi,
-                  closure::ClosureEnv::NotClosure);
+
+    let lldecl = match ccx.instances().borrow().get(&instance) {
+        Some(&val) => val,
+        None => bug!("Instance `{:?}` not already declared", instance)
+    };
+
+    match item {
+        hir_map::NodeItem(&hir::Item {
+            node: hir::ItemFn(ref decl, _, _, _, _, ref body), ..
+        }) |
+        hir_map::NodeTraitItem(&hir::TraitItem {
+            node: hir::MethodTraitItem(
+                hir::MethodSig { ref decl, .. }, Some(ref body)), ..
+        }) |
+        hir_map::NodeImplItem(&hir::ImplItem {
+            node: hir::ImplItemKind::Method(
+                hir::MethodSig { ref decl, .. }, ref body), ..
+        }) => {
+            trans_closure(ccx, decl, body, lldecl, instance,
+                          fn_node_id, &sig, abi, closure::ClosureEnv::NotClosure);
+        }
+        _ => bug!("Instance is a {:?}?", item)
+    }
 }
 
 pub fn trans_named_tuple_constructor<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
@@ -2170,86 +2180,10 @@ pub fn llvm_linkage_by_name(name: &str) -> Option<Linkage> {
     }
 }
 
-
-/// Enum describing the origin of an LLVM `Value`, for linkage purposes.
-#[derive(Copy, Clone)]
-pub enum ValueOrigin {
-    /// The LLVM `Value` is in this context because the corresponding item was
-    /// assigned to the current compilation unit.
-    OriginalTranslation,
-    /// The `Value`'s corresponding item was assigned to some other compilation
-    /// unit, but the `Value` was translated in this context anyway because the
-    /// item is marked `#[inline]`.
-    InlinedCopy,
-}
-
-/// Set the appropriate linkage for an LLVM `ValueRef` (function or global).
-/// If the `llval` is the direct translation of a specific Rust item, `id`
-/// should be set to the `NodeId` of that item.  (This mapping should be
-/// 1-to-1, so monomorphizations and drop/visit glue should have `id` set to
-/// `None`.)  `llval_origin` indicates whether `llval` is the translation of an
-/// item assigned to `ccx`'s compilation unit or an inlined copy of an item
-/// assigned to a different compilation unit.
-pub fn update_linkage(ccx: &CrateContext,
-                      llval: ValueRef,
-                      id: Option<ast::NodeId>,
-                      llval_origin: ValueOrigin) {
-    match llval_origin {
-        InlinedCopy => {
-            // `llval` is a translation of an item defined in a separate
-            // compilation unit.  This only makes sense if there are at least
-            // two compilation units.
-            assert!(ccx.sess().opts.cg.codegen_units > 1 ||
-                    ccx.sess().opts.debugging_opts.incremental.is_some());
-            // `llval` is a copy of something defined elsewhere, so use
-            // `AvailableExternallyLinkage` to avoid duplicating code in the
-            // output.
-            llvm::SetLinkage(llval, llvm::AvailableExternallyLinkage);
-            return;
-        },
-        OriginalTranslation => {},
-    }
-
-    if let Some(id) = id {
-        let item = ccx.tcx().map.get(id);
-        if let hir_map::NodeItem(i) = item {
-            if let Some(name) = attr::first_attr_value_str_by_name(&i.attrs, "linkage") {
-                if let Some(linkage) = llvm_linkage_by_name(&name) {
-                    llvm::SetLinkage(llval, linkage);
-                } else {
-                    ccx.sess().span_fatal(i.span, "invalid linkage specified");
-                }
-                return;
-            }
-        }
-    }
-
-    let (is_reachable, is_generic) = if let Some(id) = id {
-        (ccx.reachable().contains(&id), false)
-    } else {
-        (false, true)
-    };
-
-    // We need external linkage for items reachable from other translation units, this include
-    // other codegen units in case of parallel compilations.
-    if is_reachable || ccx.sess().opts.cg.codegen_units > 1 {
-        if is_generic {
-            // This only happens with multiple codegen units, in which case we need to use weak_odr
-            // linkage because other crates might expose the same symbol. We cannot use
-            // linkonce_odr here because the symbol might then get dropped before the other codegen
-            // units get to link it.
-            llvm::SetUniqueComdat(ccx.llmod(), llval);
-            llvm::SetLinkage(llval, llvm::WeakODRLinkage);
-        } else {
-            llvm::SetLinkage(llval, llvm::ExternalLinkage);
-        }
-    } else {
-        llvm::SetLinkage(llval, llvm::InternalLinkage);
-    }
-}
-
-fn set_global_section(ccx: &CrateContext, llval: ValueRef, i: &hir::Item) {
-    if let Some(sect) = attr::first_attr_value_str_by_name(&i.attrs, "link_section") {
+pub fn set_link_section(ccx: &CrateContext,
+                        llval: ValueRef,
+                        attrs: &[ast::Attribute]) {
+    if let Some(sect) = attr::first_attr_value_str_by_name(attrs, "link_section") {
         if contains_null(&sect) {
             ccx.sess().fatal(&format!("Illegal null byte in link_section value: `{}`", &sect));
         }
@@ -2260,109 +2194,60 @@ fn set_global_section(ccx: &CrateContext, llval: ValueRef, i: &hir::Item) {
     }
 }
 
-pub fn trans_item(ccx: &CrateContext, item: &hir::Item) {
+fn trans_item(ccx: &CrateContext, item: &hir::Item) {
     let _icx = push_ctxt("trans_item");
 
-    let tcx = ccx.tcx();
-    let from_external = ccx.external_srcs().borrow().contains_key(&item.id);
-
     match item.node {
-        hir::ItemFn(ref decl, _, _, _, ref generics, ref body) => {
-            if !generics.is_type_parameterized() {
-                let trans_everywhere = attr::requests_inline(&item.attrs);
-                // Ignore `trans_everywhere` for cross-crate inlined items
-                // (`from_external`).  `trans_item` will be called once for each
-                // compilation unit that references the item, so it will still get
-                // translated everywhere it's needed.
-                for (ref ccx, is_origin) in ccx.maybe_iter(!from_external && trans_everywhere) {
-                    let def_id = tcx.map.local_def_id(item.id);
-                    let empty_substs = ccx.empty_substs_for_def_id(def_id);
-                    let llfn = Callee::def(ccx, def_id, empty_substs).reify(ccx).val;
-                    trans_fn(ccx, &decl, &body, llfn, empty_substs, item.id);
-                    set_global_section(ccx, llfn, item);
-                    update_linkage(ccx,
-                                   llfn,
-                                   Some(item.id),
-                                   if is_origin {
-                                       OriginalTranslation
-                                   } else {
-                                       InlinedCopy
-                                   });
-
-                    if is_entry_fn(ccx.sess(), item.id) {
-                        create_entry_wrapper(ccx, item.span, llfn);
-                        // check for the #[rustc_error] annotation, which forces an
-                        // error in trans. This is used to write compile-fail tests
-                        // that actually test that compilation succeeds without
-                        // reporting an error.
-                        if tcx.has_attr(def_id, "rustc_error") {
-                            tcx.sess.span_fatal(item.span, "compilation successful");
-                        }
-                    }
-                }
-            }
-        }
-        hir::ItemImpl(_, _, ref generics, _, _, ref impl_items) => {
-            // Both here and below with generic methods, be sure to recurse and look for
-            // items that we need to translate.
-            if !generics.ty_params.is_empty() {
-                return;
-            }
-
-            for impl_item in impl_items {
-                if let hir::ImplItemKind::Method(ref sig, ref body) = impl_item.node {
-                    if sig.generics.ty_params.is_empty() {
-                        let trans_everywhere = attr::requests_inline(&impl_item.attrs);
-                        for (ref ccx, is_origin) in ccx.maybe_iter(trans_everywhere) {
-                            let def_id = tcx.map.local_def_id(impl_item.id);
-                            let empty_substs = ccx.empty_substs_for_def_id(def_id);
-                            let llfn = Callee::def(ccx, def_id, empty_substs).reify(ccx).val;
-                            trans_fn(ccx, &sig.decl, body, llfn, empty_substs, impl_item.id);
-                            update_linkage(ccx, llfn, Some(impl_item.id),
-                                if is_origin {
-                                    OriginalTranslation
-                                } else {
-                                    InlinedCopy
-                                });
-                        }
-                    }
-                }
-            }
-        }
         hir::ItemEnum(ref enum_definition, ref gens) => {
             if gens.ty_params.is_empty() {
                 // sizes only make sense for non-generic types
                 enum_variant_size_lint(ccx, enum_definition, item.span, item.id);
             }
         }
-        hir::ItemStatic(_, m, ref expr) => {
-            let g = match consts::trans_static(ccx, m, expr, item.id, &item.attrs) {
-                Ok(g) => g,
-                Err(err) => ccx.tcx().sess.span_fatal(expr.span, &err.description()),
-            };
-            set_global_section(ccx, g, item);
-            update_linkage(ccx, g, Some(item.id), OriginalTranslation);
+        hir::ItemFn(..) |
+        hir::ItemImpl(..) |
+        hir::ItemStatic(..) => {
+            // Don't do anything here. Translation has been moved to
+            // being "collector-driven".
         }
         _ => {}
     }
 }
 
-pub fn is_entry_fn(sess: &Session, node_id: ast::NodeId) -> bool {
-    match *sess.entry_fn.borrow() {
-        Some((entry_id, _)) => node_id == entry_id,
-        None => false,
+/// Create the `main` function which will initialise the rust runtime and call
+/// users’ main function.
+pub fn maybe_create_entry_wrapper(ccx: &CrateContext) {
+    let (main_def_id, span) = match *ccx.sess().entry_fn.borrow() {
+        Some((id, span)) => {
+            (ccx.tcx().map.local_def_id(id), span)
+        }
+        None => return,
+    };
+
+    // check for the #[rustc_error] annotation, which forces an
+    // error in trans. This is used to write compile-fail tests
+    // that actually test that compilation succeeds without
+    // reporting an error.
+    if ccx.tcx().has_attr(main_def_id, "rustc_error") {
+        ccx.tcx().sess.span_fatal(span, "compilation successful");
+    }
+
+    let instance = Instance::mono(ccx.shared(), main_def_id);
+
+    if !ccx.codegen_unit().items.contains_key(&TransItem::Fn(instance)) {
+        // We want to create the wrapper in the same codegen unit as Rust's main
+        // function.
+        return;
     }
-}
 
-/// Create the `main` function which will initialise the rust runtime and call users’ main
-/// function.
-pub fn create_entry_wrapper(ccx: &CrateContext, sp: Span, main_llfn: ValueRef) {
+    let main_llfn = Callee::def(ccx, main_def_id, instance.substs).reify(ccx).val;
+
     let et = ccx.sess().entry_type.get().unwrap();
     match et {
         config::EntryMain => {
-            create_entry_fn(ccx, sp, main_llfn, true);
+            create_entry_fn(ccx, span, main_llfn, true);
         }
-        config::EntryStart => create_entry_fn(ccx, sp, main_llfn, false),
+        config::EntryStart => create_entry_fn(ccx, span, main_llfn, false),
         config::EntryNone => {}    // Do nothing.
     }
 
@@ -2483,16 +2368,16 @@ fn internalize_symbols(cx: &CrateContextList, reachable: &HashSet<&str>) {
                 let linkage = llvm::LLVMGetLinkage(val);
                 // We only care about external declarations (not definitions)
                 // and available_externally definitions.
-                if !(linkage == llvm::ExternalLinkage as c_uint &&
-                     llvm::LLVMIsDeclaration(val) != 0) &&
-                   !(linkage == llvm::AvailableExternallyLinkage as c_uint) {
-                    continue;
+                let is_available_externally = linkage == llvm::AvailableExternallyLinkage as c_uint;
+                let is_decl = llvm::LLVMIsDeclaration(val) != 0;
+
+                if is_decl || is_available_externally {
+                    let name = CStr::from_ptr(llvm::LLVMGetValueName(val))
+                        .to_bytes()
+                        .to_vec();
+                    declared.insert(name);
                 }
 
-                let name = CStr::from_ptr(llvm::LLVMGetValueName(val))
-                               .to_bytes()
-                               .to_vec();
-                declared.insert(name);
             }
         }
 
@@ -2502,21 +2387,27 @@ fn internalize_symbols(cx: &CrateContextList, reachable: &HashSet<&str>) {
         for ccx in cx.iter() {
             for val in iter_globals(ccx.llmod()).chain(iter_functions(ccx.llmod())) {
                 let linkage = llvm::LLVMGetLinkage(val);
+
+                let is_external = linkage == llvm::ExternalLinkage as c_uint;
+                let is_weak_odr = linkage == llvm::WeakODRLinkage as c_uint;
+                let is_decl = llvm::LLVMIsDeclaration(val) != 0;
+
                 // We only care about external definitions.
-                if !((linkage == llvm::ExternalLinkage as c_uint ||
-                      linkage == llvm::WeakODRLinkage as c_uint) &&
-                     llvm::LLVMIsDeclaration(val) == 0) {
-                    continue;
-                }
+                if (is_external || is_weak_odr) && !is_decl {
+
+                    let name = CStr::from_ptr(llvm::LLVMGetValueName(val))
+                                .to_bytes()
+                                .to_vec();
+
+                    let is_declared = declared.contains(&name);
+                    let reachable = reachable.contains(str::from_utf8(&name).unwrap());
+
+                    if !is_declared && !reachable {
+                        llvm::SetLinkage(val, llvm::InternalLinkage);
+                        llvm::SetDLLStorageClass(val, llvm::DefaultStorageClass);
+                        llvm::UnsetComdat(val);
+                    }
 
-                let name = CStr::from_ptr(llvm::LLVMGetValueName(val))
-                               .to_bytes()
-                               .to_vec();
-                if !declared.contains(&name) &&
-                   !reachable.contains(str::from_utf8(&name).unwrap()) {
-                    llvm::SetLinkage(val, llvm::InternalLinkage);
-                    llvm::SetDLLStorageClass(val, llvm::DefaultStorageClass);
-                    llvm::UnsetComdat(val);
                 }
             }
         }
@@ -2610,8 +2501,8 @@ fn iter_functions(llmod: llvm::ModuleRef) -> ValueIter {
 ///
 /// This list is later used by linkers to determine the set of symbols needed to
 /// be exposed from a dynamic library and it's also encoded into the metadata.
-pub fn filter_reachable_ids(scx: &SharedCrateContext) -> NodeSet {
-    scx.reachable().iter().map(|x| *x).filter(|&id| {
+pub fn filter_reachable_ids(tcx: TyCtxt, reachable: NodeSet) -> NodeSet {
+    reachable.into_iter().filter(|&id| {
         // Next, we want to ignore some FFI functions that are not exposed from
         // this crate. Reachable FFI functions can be lumped into two
         // categories:
@@ -2625,9 +2516,9 @@ pub fn filter_reachable_ids(scx: &SharedCrateContext) -> NodeSet {
         //
         // As a result, if this id is an FFI item (foreign item) then we only
         // let it through if it's included statically.
-        match scx.tcx().map.get(id) {
+        match tcx.map.get(id) {
             hir_map::NodeForeignItem(..) => {
-                scx.sess().cstore.is_statically_included_foreign_item(id)
+                tcx.sess.cstore.is_statically_included_foreign_item(id)
             }
 
             // Only consider nodes that actually have exported symbols.
@@ -2637,8 +2528,8 @@ pub fn filter_reachable_ids(scx: &SharedCrateContext) -> NodeSet {
                 node: hir::ItemFn(..), .. }) |
             hir_map::NodeImplItem(&hir::ImplItem {
                 node: hir::ImplItemKind::Method(..), .. }) => {
-                let def_id = scx.tcx().map.local_def_id(id);
-                let scheme = scx.tcx().lookup_item_type(def_id);
+                let def_id = tcx.map.local_def_id(id);
+                let scheme = tcx.lookup_item_type(def_id);
                 scheme.generics.types.is_empty()
             }
 
@@ -2660,6 +2551,7 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
     let krate = tcx.map.krate();
 
     let ty::CrateAnalysis { export_map, reachable, name, .. } = analysis;
+    let reachable = filter_reachable_ids(tcx, reachable);
 
     let check_overflow = if let Some(v) = tcx.sess.opts.debugging_opts.force_overflow_checks {
         v
@@ -2683,29 +2575,34 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                                              reachable,
                                              check_overflow,
                                              check_dropflag);
-
-    let reachable_symbol_ids = filter_reachable_ids(&shared_ccx);
-
     // Translate the metadata.
     let metadata = time(tcx.sess.time_passes(), "write metadata", || {
-        write_metadata(&shared_ccx, &reachable_symbol_ids)
+        write_metadata(&shared_ccx, shared_ccx.reachable())
     });
 
     let metadata_module = ModuleTranslation {
+        name: "metadata".to_string(),
         llcx: shared_ccx.metadata_llcx(),
         llmod: shared_ccx.metadata_llmod(),
     };
     let no_builtins = attr::contains_name(&krate.attrs, "no_builtins");
 
-    let codegen_units = collect_and_partition_translation_items(&shared_ccx);
+    // Run the translation item collector and partition the collected items into
+    // codegen units.
+    let (codegen_units, symbol_map) = collect_and_partition_translation_items(&shared_ccx);
     let codegen_unit_count = codegen_units.len();
-    assert!(tcx.sess.opts.cg.codegen_units == codegen_unit_count ||
-            tcx.sess.opts.debugging_opts.incremental.is_some());
 
-    let crate_context_list = CrateContextList::new(&shared_ccx, codegen_units);
+    let symbol_map = Rc::new(symbol_map);
 
+    let crate_context_list = CrateContextList::new(&shared_ccx,
+                                                   codegen_units,
+                                                   symbol_map.clone());
     let modules = crate_context_list.iter()
-        .map(|ccx| ModuleTranslation { llcx: ccx.llcx(), llmod: ccx.llmod() })
+        .map(|ccx| ModuleTranslation {
+            name: String::from(&ccx.codegen_unit().name[..]),
+            llcx: ccx.llcx(),
+            llmod: ccx.llmod()
+        })
         .collect();
 
     // Skip crate items and just output metadata in -Z no-trans mode.
@@ -2722,26 +2619,30 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
         };
     }
 
-    {
-        let ccx = crate_context_list.get_ccx(0);
+    // Instantiate translation items without filling out definitions yet...
+    for ccx in crate_context_list.iter() {
+        let trans_items = ccx.codegen_unit()
+                             .items_in_deterministic_order(tcx, &symbol_map);
 
-        // Translate all items. See `TransModVisitor` for
-        // details on why we walk in this particular way.
-        {
-            let _icx = push_ctxt("text");
-            intravisit::walk_mod(&mut TransItemsWithinModVisitor { ccx: &ccx }, &krate.module);
-            krate.visit_all_items(&mut TransModVisitor { ccx: &ccx });
+        for (trans_item, linkage) in trans_items {
+            trans_item.predefine(&ccx, linkage);
         }
-
-        collector::print_collection_results(ccx.shared());
-
-        symbol_names_test::report_symbol_names(&ccx);
     }
 
+    // ... and now that we have everything pre-defined, fill out those definitions.
     for ccx in crate_context_list.iter() {
-        if ccx.sess().opts.debuginfo != NoDebugInfo {
-            debuginfo::finalize(&ccx);
+        let trans_items = ccx.codegen_unit()
+                             .items_in_deterministic_order(tcx, &symbol_map);
+
+        for (trans_item, _) in trans_items {
+           trans_item.define(&ccx);
         }
+
+        // If this codegen unit contains the main function, also create the
+        // wrapper here
+        maybe_create_entry_wrapper(&ccx);
+
+        // Run replace-all-uses-with for statics that need it
         for &(old_g, new_g) in ccx.statics_to_rauw().borrow().iter() {
             unsafe {
                 let bitcast = llvm::LLVMConstPointerCast(new_g, llvm::LLVMTypeOf(old_g));
@@ -2749,6 +2650,26 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                 llvm::LLVMDeleteGlobal(old_g);
             }
         }
+
+        // Finalize debuginfo
+        if ccx.sess().opts.debuginfo != NoDebugInfo {
+            debuginfo::finalize(&ccx);
+        }
+    }
+
+    symbol_names_test::report_symbol_names(&shared_ccx);
+
+    {
+        let ccx = crate_context_list.get_ccx(0);
+
+        // FIXME: #34018
+        // At this point, we only walk the HIR for running
+        // enum_variant_size_lint(). This should arguably be moved somewhere
+        // else.
+        {
+            intravisit::walk_mod(&mut TransItemsWithinModVisitor { ccx: &ccx }, &krate.module);
+            krate.visit_all_items(&mut TransModVisitor { ccx: &ccx });
+        }
     }
 
     if shared_ccx.sess().trans_stats() {
@@ -2758,6 +2679,8 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
         println!("n_null_glues: {}", stats.n_null_glues.get());
         println!("n_real_glues: {}", stats.n_real_glues.get());
 
+        println!("n_fallback_instantiations: {}", stats.n_fallback_instantiations.get());
+
         println!("n_fns: {}", stats.n_fns.get());
         println!("n_monos: {}", stats.n_monos.get());
         println!("n_inlines: {}", stats.n_inlines.get());
@@ -2774,6 +2697,7 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
             }
         }
     }
+
     if shared_ccx.sess().count_llvm_insns() {
         for (k, v) in shared_ccx.stats().llvm_insns.borrow().iter() {
             println!("{:7} {}", *v, *k);
@@ -2781,10 +2705,11 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
     }
 
     let sess = shared_ccx.sess();
-    let mut reachable_symbols = reachable_symbol_ids.iter().map(|&id| {
+    let mut reachable_symbols = shared_ccx.reachable().iter().map(|&id| {
         let def_id = shared_ccx.tcx().map.local_def_id(id);
-        Instance::mono(&shared_ccx, def_id).symbol_name(&shared_ccx)
+        symbol_for_def_id(def_id, &shared_ccx, &symbol_map)
     }).collect::<Vec<_>>();
+
     if sess.entry_fn.borrow().is_some() {
         reachable_symbols.push("main".to_string());
     }
@@ -2806,7 +2731,7 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
         reachable_symbols.extend(syms.into_iter().filter(|did| {
             sess.cstore.is_extern_item(shared_ccx.tcx(), *did)
         }).map(|did| {
-            Instance::mono(&shared_ccx, did).symbol_name(&shared_ccx)
+            symbol_for_def_id(did, &shared_ccx, &symbol_map)
         }));
     }
 
@@ -2821,6 +2746,7 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
     }
 
     let linker_info = LinkerInfo::new(&shared_ccx, &reachable_symbols);
+
     CrateTranslation {
         modules: modules,
         metadata_module: metadata_module,
@@ -2899,7 +2825,7 @@ fn visit_item(&mut self, i: &hir::Item) {
 }
 
 fn collect_and_partition_translation_items<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>)
-                                                     -> Vec<CodegenUnit<'tcx>> {
+                                                     -> (Vec<CodegenUnit<'tcx>>, SymbolMap<'tcx>) {
     let time_passes = scx.sess().time_passes();
 
     let collection_mode = match scx.sess().opts.debugging_opts.print_trans_items {
@@ -2922,10 +2848,13 @@ fn collect_and_partition_translation_items<'a, 'tcx>(scx: &SharedCrateContext<'a
         None => TransItemCollectionMode::Lazy
     };
 
-    let (items, inlining_map) = time(time_passes, "translation item collection", || {
-        collector::collect_crate_translation_items(&scx, collection_mode)
+    let (items, inlining_map) =
+        time(time_passes, "translation item collection", || {
+            collector::collect_crate_translation_items(&scx, collection_mode)
     });
 
+    let symbol_map = SymbolMap::build(scx, items.iter().cloned());
+
     let strategy = if scx.sess().opts.debugging_opts.incremental.is_some() {
         PartitioningStrategy::PerModule
     } else {
@@ -2936,9 +2865,21 @@ fn collect_and_partition_translation_items<'a, 'tcx>(scx: &SharedCrateContext<'a
         partitioning::partition(scx.tcx(),
                                 items.iter().cloned(),
                                 strategy,
-                                &inlining_map)
+                                &inlining_map,
+                                scx.reachable())
     });
 
+    assert!(scx.tcx().sess.opts.cg.codegen_units == codegen_units.len() ||
+            scx.tcx().sess.opts.debugging_opts.incremental.is_some());
+
+    {
+        let mut ccx_map = scx.translation_items().borrow_mut();
+
+        for trans_item in items.iter().cloned() {
+            ccx_map.insert(trans_item);
+        }
+    }
+
     if scx.sess().opts.debugging_opts.print_trans_items.is_some() {
         let mut item_to_cgus = HashMap::new();
 
@@ -2990,13 +2931,26 @@ fn collect_and_partition_translation_items<'a, 'tcx>(scx: &SharedCrateContext<'a
         for item in item_keys {
             println!("TRANS_ITEM {}", item);
         }
+    }
 
-        let mut ccx_map = scx.translation_items().borrow_mut();
+    (codegen_units, symbol_map)
+}
 
-        for cgi in items {
-            ccx_map.insert(cgi, TransItemState::PredictedButNotGenerated);
+fn symbol_for_def_id<'a, 'tcx>(def_id: DefId,
+                               scx: &SharedCrateContext<'a, 'tcx>,
+                               symbol_map: &SymbolMap<'tcx>)
+                               -> String {
+    // Just try to look things up in the symbol map. If nothing's there, we
+    // recompute.
+    if let Some(node_id) = scx.tcx().map.as_local_node_id(def_id) {
+        if let Some(sym) = symbol_map.get(TransItem::Static(node_id)) {
+            return sym.to_owned();
         }
     }
 
-    codegen_units
+    let instance = Instance::mono(scx, def_id);
+
+    symbol_map.get(TransItem::Fn(instance))
+              .map(str::to_owned)
+              .unwrap_or_else(|| instance.symbol_name(scx))
 }
index 9ea65532b35b61c8c3a1cd35deda4ca89861ad34..983ee564c35b1051fcc447b514d4719d05578707 100644 (file)
@@ -46,6 +46,7 @@
 use machine::llalign_of_min;
 use meth;
 use monomorphize::{self, Instance};
+use trans_item::TransItem;
 use type_::Type;
 use type_of;
 use value::Value;
@@ -302,7 +303,7 @@ pub fn trans_fn_pointer_shim<'a, 'tcx>(
     let tcx = ccx.tcx();
 
     // Normalize the type for better caching.
-    let bare_fn_ty = tcx.erase_regions(&bare_fn_ty);
+    let bare_fn_ty = tcx.normalize_associated_type(&bare_fn_ty);
 
     // If this is an impl of `Fn` or `FnMut` trait, the receiver is `&self`.
     let is_by_ref = match closure_kind {
@@ -468,7 +469,7 @@ fn is_named_tuple_constructor(tcx: TyCtxt, def_id: DefId) -> bool {
         // Should be either intra-crate or inlined.
         assert_eq!(def_id.krate, LOCAL_CRATE);
 
-        let substs = tcx.mk_substs(substs.clone().erase_regions());
+        let substs = tcx.normalize_associated_type(&substs);
         let (val, fn_ty) = monomorphize::monomorphic_fn(ccx, def_id, substs);
         let fn_ptr_ty = match fn_ty.sty {
             ty::TyFnDef(_, _, fty) => {
@@ -536,13 +537,15 @@ fn is_named_tuple_constructor(tcx: TyCtxt, def_id: DefId) -> bool {
     // reference. It also occurs when testing libcore and in some
     // other weird situations. Annoying.
 
-    let sym = instance.symbol_name(ccx.shared());
+    let sym = ccx.symbol_map().get_or_compute(ccx.shared(),
+                                              TransItem::Fn(instance));
+
     let llptrty = type_of::type_of(ccx, fn_ptr_ty);
     let llfn = if let Some(llfn) = declare::get_declared_value(ccx, &sym) {
         if let Some(span) = local_item {
             if declare::get_defined_value(ccx, &sym).is_some() {
                 ccx.sess().span_fatal(span,
-                    &format!("symbol `{}` is already defined", sym));
+                    &format!("symbol `{}` is already defined", &sym));
             }
         }
 
index 9196cfce16feb3c85f2b937605b18c169f1689ea..b992ba362a98321ef64505b7858cff6f021b85ff 100644 (file)
@@ -10,7 +10,7 @@
 
 use arena::TypedArena;
 use back::symbol_names;
-use llvm::{ValueRef, get_param, get_params};
+use llvm::{self, ValueRef, get_param, get_params};
 use rustc::hir::def_id::DefId;
 use abi::{Abi, FnType};
 use adt;
@@ -167,7 +167,7 @@ fn get_or_create_closure_declaration<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
             variadic: false
         })
     }));
-    let llfn = declare::define_internal_fn(ccx, &symbol, function_type);
+    let llfn = declare::declare_fn(ccx, &symbol, function_type);
 
     // set an inline hint for all closures
     attributes::inline(llfn, attributes::InlineAttr::Hint);
@@ -211,6 +211,8 @@ pub fn trans_closure_expr<'a, 'tcx>(dest: Dest<'a, 'tcx>,
            id, closure_def_id, closure_substs);
 
     let llfn = get_or_create_closure_declaration(ccx, closure_def_id, closure_substs);
+    llvm::SetLinkage(llfn, llvm::WeakODRLinkage);
+    llvm::SetUniqueComdat(ccx.llmod(), llfn);
 
     // Get the type of this closure. Use the current `param_substs` as
     // the closure substitutions. This makes sense because the closure
@@ -377,7 +379,7 @@ fn trans_fn_once_adapter_shim<'a, 'tcx>(
     // Create the by-value helper.
     let function_name =
         symbol_names::internal_name_from_type_and_suffix(ccx, llonce_fn_ty, "once_shim");
-    let lloncefn = declare::define_internal_fn(ccx, &function_name, llonce_fn_ty);
+    let lloncefn = declare::declare_fn(ccx, &function_name, llonce_fn_ty);
     attributes::set_frame_pointer_elimination(ccx, lloncefn);
 
     let (block_arena, fcx): (TypedArena<_>, FunctionContext);
index eea6aec37260e9e918ed8417a43e850919871494..ba2cd2ba699926ca40e274f5e015f590b2c04f19 100644 (file)
 use syntax::abi::Abi;
 use errors;
 use syntax_pos::DUMMY_SP;
+use syntax::ast::NodeId;
 use base::custom_coerce_unsize_info;
 use context::SharedCrateContext;
 use common::{fulfill_obligation, normalize_and_test_predicates, type_is_sized};
@@ -349,17 +350,14 @@ fn collect_items_rec<'a, 'tcx: 'a>(scx: &SharedCrateContext<'a, 'tcx>,
                 || format!("Could not find MIR for static: {:?}", def_id));
 
             let empty_substs = scx.empty_substs_for_def_id(def_id);
-            let mut visitor = MirNeighborCollector {
+            let visitor = MirNeighborCollector {
                 scx: scx,
                 mir: &mir,
                 output: &mut neighbors,
                 param_substs: empty_substs
             };
 
-            visitor.visit_mir(&mir);
-            for promoted in &mir.promoted {
-                visitor.visit_mir(promoted);
-            }
+            visit_mir_and_promoted(visitor, &mir);
         }
         TransItem::Fn(instance) => {
             // Keep track of the monomorphization recursion depth
@@ -372,17 +370,14 @@ fn collect_items_rec<'a, 'tcx: 'a>(scx: &SharedCrateContext<'a, 'tcx>,
             let mir = errors::expect(scx.sess().diagnostic(), scx.get_mir(instance.def),
                 || format!("Could not find MIR for function: {}", instance));
 
-            let mut visitor = MirNeighborCollector {
+            let visitor = MirNeighborCollector {
                 scx: scx,
                 mir: &mir,
                 output: &mut neighbors,
                 param_substs: instance.substs
             };
 
-            visitor.visit_mir(&mir);
-            for promoted in &mir.promoted {
-                visitor.visit_mir(promoted);
-            }
+            visit_mir_and_promoted(visitor, &mir);
         }
     }
 
@@ -456,12 +451,25 @@ fn visit_rvalue(&mut self, rvalue: &mir::Rvalue<'tcx>) {
         match *rvalue {
             mir::Rvalue::Aggregate(mir::AggregateKind::Closure(def_id,
                                                                ref substs), _) => {
-                assert!(can_have_local_instance(self.scx.tcx(), def_id));
-                let trans_item = create_fn_trans_item(self.scx.tcx(),
-                                                      def_id,
-                                                      substs.func_substs,
-                                                      self.param_substs);
-                self.output.push(trans_item);
+                let mir = errors::expect(self.scx.sess().diagnostic(),
+                                         self.scx.get_mir(def_id),
+                                         || {
+                    format!("Could not find MIR for closure: {:?}", def_id)
+                });
+
+                let concrete_substs = monomorphize::apply_param_substs(self.scx.tcx(),
+                                                                       self.param_substs,
+                                                                       &substs.func_substs);
+                let concrete_substs = self.scx.tcx().erase_regions(&concrete_substs);
+
+                let visitor = MirNeighborCollector {
+                    scx: self.scx,
+                    mir: &mir,
+                    output: self.output,
+                    param_substs: concrete_substs
+                };
+
+                visit_mir_and_promoted(visitor, &mir);
             }
             // When doing an cast from a regular pointer to a fat pointer, we
             // have to instantiate all methods of the trait being cast to, so we
@@ -624,7 +632,8 @@ fn visit_terminator_kind(&mut self,
                             let operand_ty = monomorphize::apply_param_substs(tcx,
                                                                               self.param_substs,
                                                                               &mt.ty);
-                            self.output.push(TransItem::DropGlue(DropGlueKind::Ty(operand_ty)));
+                            let ty = glue::get_drop_glue_type(tcx, operand_ty);
+                            self.output.push(TransItem::DropGlue(DropGlueKind::Ty(ty)));
                         } else {
                             bug!("Has the drop_in_place() intrinsic's signature changed?")
                         }
@@ -1070,7 +1079,6 @@ fn visit_item(&mut self, item: &'v hir::Item) {
             hir::ItemTy(..)          |
             hir::ItemDefaultImpl(..) |
             hir::ItemTrait(..)       |
-            hir::ItemConst(..)       |
             hir::ItemMod(..)         => {
                 // Nothing to do, just keep recursing...
             }
@@ -1107,9 +1115,14 @@ fn visit_item(&mut self, item: &'v hir::Item) {
                                         self.scx.tcx().map.local_def_id(item.id)));
                 self.output.push(TransItem::Static(item.id));
             }
-            hir::ItemFn(_, _, constness, _, ref generics, _) => {
-                if !generics.is_type_parameterized() &&
-                   constness == hir::Constness::NotConst {
+            hir::ItemConst(..) => {
+                debug!("RootCollector: ItemConst({})",
+                       def_id_to_string(self.scx.tcx(),
+                                        self.scx.tcx().map.local_def_id(item.id)));
+                add_roots_for_const_item(self.scx, item.id, self.output);
+            }
+            hir::ItemFn(_, _, _, _, ref generics, _) => {
+                if !generics.is_type_parameterized() {
                     let def_id = self.scx.tcx().map.local_def_id(item.id);
 
                     debug!("RootCollector: ItemFn({})",
@@ -1129,9 +1142,8 @@ fn visit_impl_item(&mut self, ii: &'v hir::ImplItem) {
         match ii.node {
             hir::ImplItemKind::Method(hir::MethodSig {
                 ref generics,
-                constness,
                 ..
-            }, _) if constness == hir::Constness::NotConst => {
+            }, _) => {
                 let hir_map = &self.scx.tcx().map;
                 let parent_node_id = hir_map.get_parent_node(ii.id);
                 let is_impl_generic = match hir_map.expect_item(parent_node_id) {
@@ -1228,111 +1240,34 @@ fn create_trans_items_for_default_impls<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
     }
 }
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub enum TransItemState {
-    PredictedAndGenerated,
-    PredictedButNotGenerated,
-    NotPredictedButGenerated,
-}
+// There are no translation items for constants themselves but their
+// initializers might still contain something that produces translation items,
+// such as cast that introduce a new vtable.
+fn add_roots_for_const_item<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
+                                      const_item_node_id: NodeId,
+                                      output: &mut Vec<TransItem<'tcx>>)
+{
+    let def_id = scx.tcx().map.local_def_id(const_item_node_id);
+
+    // Scan the MIR in order to find function calls, closures, and
+    // drop-glue
+    let mir = errors::expect(scx.sess().diagnostic(), scx.get_mir(def_id),
+        || format!("Could not find MIR for const: {:?}", def_id));
+
+    let empty_substs = scx.empty_substs_for_def_id(def_id);
+    let visitor = MirNeighborCollector {
+        scx: scx,
+        mir: &mir,
+        output: output,
+        param_substs: empty_substs
+    };
 
-pub fn collecting_debug_information(scx: &SharedCrateContext) -> bool {
-    return cfg!(debug_assertions) &&
-           scx.sess().opts.debugging_opts.print_trans_items.is_some();
+    visit_mir_and_promoted(visitor, &mir);
 }
 
-pub fn print_collection_results<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>) {
-    use std::hash::{Hash, SipHasher, Hasher};
-
-    if !collecting_debug_information(scx) {
-        return;
-    }
-
-    fn hash<T: Hash>(t: &T) -> u64 {
-        let mut s = SipHasher::new();
-        t.hash(&mut s);
-        s.finish()
-    }
-
-    let trans_items = scx.translation_items().borrow();
-
-    {
-        // Check for duplicate item keys
-        let mut item_keys = FnvHashMap();
-
-        for (item, item_state) in trans_items.iter() {
-            let k = item.to_string(scx.tcx());
-
-            if item_keys.contains_key(&k) {
-                let prev: (TransItem, TransItemState) = item_keys[&k];
-                debug!("DUPLICATE KEY: {}", k);
-                debug!(" (1) {:?}, {:?}, hash: {}, raw: {}",
-                       prev.0,
-                       prev.1,
-                       hash(&prev.0),
-                       prev.0.to_raw_string());
-
-                debug!(" (2) {:?}, {:?}, hash: {}, raw: {}",
-                       *item,
-                       *item_state,
-                       hash(item),
-                       item.to_raw_string());
-            } else {
-                item_keys.insert(k, (*item, *item_state));
-            }
-        }
-    }
-
-    let mut predicted_but_not_generated = FnvHashSet();
-    let mut not_predicted_but_generated = FnvHashSet();
-    let mut predicted = FnvHashSet();
-    let mut generated = FnvHashSet();
-
-    for (item, item_state) in trans_items.iter() {
-        let item_key = item.to_string(scx.tcx());
-
-        match *item_state {
-            TransItemState::PredictedAndGenerated => {
-                predicted.insert(item_key.clone());
-                generated.insert(item_key);
-            }
-            TransItemState::PredictedButNotGenerated => {
-                predicted_but_not_generated.insert(item_key.clone());
-                predicted.insert(item_key);
-            }
-            TransItemState::NotPredictedButGenerated => {
-                not_predicted_but_generated.insert(item_key.clone());
-                generated.insert(item_key);
-            }
-        }
-    }
-
-    debug!("Total number of translation items predicted: {}", predicted.len());
-    debug!("Total number of translation items generated: {}", generated.len());
-    debug!("Total number of translation items predicted but not generated: {}",
-           predicted_but_not_generated.len());
-    debug!("Total number of translation items not predicted but generated: {}",
-           not_predicted_but_generated.len());
-
-    if generated.len() > 0 {
-        debug!("Failed to predict {}% of translation items",
-               (100 * not_predicted_but_generated.len()) / generated.len());
-    }
-    if generated.len() > 0 {
-        debug!("Predict {}% too many translation items",
-               (100 * predicted_but_not_generated.len()) / generated.len());
-    }
-
-    debug!("");
-    debug!("Not predicted but generated:");
-    debug!("============================");
-    for item in not_predicted_but_generated {
-        debug!(" - {}", item);
-    }
-
-    debug!("");
-    debug!("Predicted but not generated:");
-    debug!("============================");
-    for item in predicted_but_not_generated {
-        debug!(" - {}", item);
+fn visit_mir_and_promoted<'tcx, V: MirVisitor<'tcx>>(mut visitor: V, mir: &mir::Mir<'tcx>) {
+    visitor.visit_mir(&mir);
+    for promoted in &mir.promoted {
+        visitor.visit_mir(promoted);
     }
 }
index 5596ab0d819e0a3470f7b6a2e360a0c72de87fa2..5732fded362f58fd132c16b8313be3ac021783b9 100644 (file)
@@ -21,7 +21,6 @@
 use {abi, adt, closure, debuginfo, expr, machine};
 use base::{self, push_ctxt};
 use callee::Callee;
-use collector;
 use trans_item::TransItem;
 use common::{type_is_sized, C_nil, const_get_elt};
 use common::{CrateContext, C_integral, C_floating, C_bool, C_str_slice, C_bytes, val_ty};
@@ -1013,31 +1012,41 @@ pub fn get_static<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, def_id: DefId)
         return Datum::new(g, ty, Lvalue::new("static"));
     }
 
-    let sym = instance.symbol_name(ccx.shared());
-
     let g = if let Some(id) = ccx.tcx().map.as_local_node_id(def_id) {
+
         let llty = type_of::type_of(ccx, ty);
-        match ccx.tcx().map.get(id) {
+        let (g, attrs) = match ccx.tcx().map.get(id) {
             hir_map::NodeItem(&hir::Item {
-                span, node: hir::ItemStatic(..), ..
+                ref attrs, span, node: hir::ItemStatic(..), ..
             }) => {
-                // If this static came from an external crate, then
-                // we need to get the symbol from metadata instead of
-                // using the current crate's name/version
-                // information in the hash of the symbol
-                debug!("making {}", sym);
-
-                // Create the global before evaluating the initializer;
-                // this is necessary to allow recursive statics.
-                declare::define_global(ccx, &sym, llty).unwrap_or_else(|| {
-                    ccx.sess().span_fatal(span,
-                        &format!("symbol `{}` is already defined", sym))
-                })
+                let sym = ccx.symbol_map()
+                             .get(TransItem::Static(id))
+                             .expect("Local statics should always be in the SymbolMap");
+                // Make sure that this is never executed for something inlined.
+                assert!(!ccx.external_srcs().borrow().contains_key(&id));
+
+                let defined_in_current_codegen_unit = ccx.codegen_unit()
+                                                         .items
+                                                         .contains_key(&TransItem::Static(id));
+                if defined_in_current_codegen_unit {
+                    if declare::get_declared_value(ccx, sym).is_none() {
+                        span_bug!(span, "trans: Static not properly pre-defined?");
+                    }
+                } else {
+                    if declare::get_declared_value(ccx, sym).is_some() {
+                        span_bug!(span, "trans: Conflicting symbol names for static?");
+                    }
+                }
+
+                let g = declare::define_global(ccx, sym, llty).unwrap();
+
+                (g, attrs)
             }
 
             hir_map::NodeForeignItem(&hir::ForeignItem {
                 ref attrs, span, node: hir::ForeignItemStatic(..), ..
             }) => {
+                let sym = instance.symbol_name(ccx.shared());
                 let g = if let Some(name) =
                         attr::first_attr_value_str_by_name(&attrs, "linkage") {
                     // If this is a static with a linkage specified, then we need to handle
@@ -1072,7 +1081,7 @@ pub fn get_static<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, def_id: DefId)
                         real_name.push_str(&sym);
                         let g2 = declare::define_global(ccx, &real_name, llty).unwrap_or_else(||{
                             ccx.sess().span_fatal(span,
-                                &format!("symbol `{}` is already defined", sym))
+                                &format!("symbol `{}` is already defined", &sym))
                         });
                         llvm::SetLinkage(g2, llvm::InternalLinkage);
                         llvm::LLVMSetInitializer(g2, g1);
@@ -1083,18 +1092,22 @@ pub fn get_static<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, def_id: DefId)
                     declare::declare_global(ccx, &sym, llty)
                 };
 
-                for attr in attrs {
-                    if attr.check_name("thread_local") {
-                        llvm::set_thread_local(g, true);
-                    }
-                }
-
-                g
+                (g, attrs)
             }
 
             item => bug!("get_static: expected static, found {:?}", item)
+        };
+
+        for attr in attrs {
+            if attr.check_name("thread_local") {
+                llvm::set_thread_local(g, true);
+            }
         }
+
+        g
     } else {
+        let sym = instance.symbol_name(ccx.shared());
+
         // FIXME(nagisa): perhaps the map of externs could be offloaded to llvm somehow?
         // FIXME(nagisa): investigate whether it can be changed into define_global
         let g = declare::declare_global(ccx, &sym, type_of::type_of(ccx, ty));
@@ -1126,11 +1139,6 @@ pub fn trans_static(ccx: &CrateContext,
                     id: ast::NodeId,
                     attrs: &[ast::Attribute])
                     -> Result<ValueRef, ConstEvalErr> {
-
-    if collector::collecting_debug_information(ccx.shared()) {
-        ccx.record_translation_item_as_generated(TransItem::Static(id));
-    }
-
     unsafe {
         let _icx = push_ctxt("trans_static");
         let def_id = ccx.tcx().map.local_def_id(id);
@@ -1197,6 +1205,9 @@ pub fn trans_static(ccx: &CrateContext,
                                "thread_local") {
             llvm::set_thread_local(g, true);
         }
+
+        base::set_link_section(ccx, g, attrs);
+
         Ok(g)
     }
 }
index bfcb1ae33b3019f000314b4e25005f23f1a839aa..b8d231db40a2af48aeda9bc1061b5f3f580f20a2 100644 (file)
 use monomorphize::Instance;
 
 use partitioning::CodegenUnit;
-use collector::TransItemState;
 use trans_item::TransItem;
 use type_::{Type, TypeNames};
 use rustc::ty::subst::{Substs, VecPerParamSpace};
 use rustc::ty::{self, Ty, TyCtxt};
 use session::config::NoDebugInfo;
 use session::Session;
+use symbol_map::SymbolMap;
 use util::sha2::Sha256;
 use util::nodemap::{NodeMap, NodeSet, DefIdMap, FnvHashMap, FnvHashSet};
 
 use std::str;
 use syntax::ast;
 use syntax::parse::token::InternedString;
+use abi::FnType;
 
 pub struct Stats {
     pub n_glues_created: Cell<usize>,
     pub n_null_glues: Cell<usize>,
     pub n_real_glues: Cell<usize>,
+    pub n_fallback_instantiations: Cell<usize>,
     pub n_fns: Cell<usize>,
     pub n_monos: Cell<usize>,
     pub n_inlines: Cell<usize>,
@@ -80,11 +82,9 @@ pub struct SharedCrateContext<'a, 'tcx: 'a> {
     mir_map: &'a MirMap<'tcx>,
     mir_cache: RefCell<DefIdMap<Rc<mir::Mir<'tcx>>>>,
 
-    available_monomorphizations: RefCell<FnvHashSet<String>>,
-    available_drop_glues: RefCell<FnvHashMap<DropGlueKind<'tcx>, String>>,
     use_dll_storage_attrs: bool,
 
-    translation_items: RefCell<FnvHashMap<TransItem<'tcx>, TransItemState>>,
+    translation_items: RefCell<FnvHashSet<TransItem<'tcx>>>,
     trait_cache: RefCell<DepTrackingMap<TraitSelectionCache<'tcx>>>,
 }
 
@@ -99,7 +99,7 @@ pub struct LocalCrateContext<'tcx> {
     codegen_unit: CodegenUnit<'tcx>,
     needs_unwind_cleanup_cache: RefCell<FnvHashMap<Ty<'tcx>, bool>>,
     fn_pointer_shims: RefCell<FnvHashMap<Ty<'tcx>, ValueRef>>,
-    drop_glues: RefCell<FnvHashMap<DropGlueKind<'tcx>, ValueRef>>,
+    drop_glues: RefCell<FnvHashMap<DropGlueKind<'tcx>, (ValueRef, FnType)>>,
     /// Track mapping of external ids to local items imported for inlining
     external: RefCell<DefIdMap<Option<ast::NodeId>>>,
     /// Backwards version of the `external` map (inlined items to where they
@@ -172,6 +172,8 @@ pub struct LocalCrateContext<'tcx> {
 
     /// Depth of the current type-of computation - used to bail out
     type_of_depth: Cell<usize>,
+
+    symbol_map: Rc<SymbolMap<'tcx>>,
 }
 
 // Implement DepTrackingMapConfig for `trait_cache`
@@ -198,12 +200,13 @@ pub struct CrateContextList<'a, 'tcx: 'a> {
 impl<'a, 'tcx: 'a> CrateContextList<'a, 'tcx> {
 
     pub fn new(shared_ccx: &'a SharedCrateContext<'a, 'tcx>,
-               codegen_units: Vec<CodegenUnit<'tcx>>)
+               codegen_units: Vec<CodegenUnit<'tcx>>,
+               symbol_map: Rc<SymbolMap<'tcx>>)
                -> CrateContextList<'a, 'tcx> {
         CrateContextList {
             shared: shared_ccx,
             local_ccxs: codegen_units.into_iter().map(|codegen_unit| {
-                LocalCrateContext::new(shared_ccx, codegen_unit)
+                LocalCrateContext::new(shared_ccx, codegen_unit, symbol_map.clone())
             }).collect()
         }
     }
@@ -403,6 +406,7 @@ pub fn new(tcx: TyCtxt<'b, 'tcx, 'tcx>,
                 n_glues_created: Cell::new(0),
                 n_null_glues: Cell::new(0),
                 n_real_glues: Cell::new(0),
+                n_fallback_instantiations: Cell::new(0),
                 n_fns: Cell::new(0),
                 n_monos: Cell::new(0),
                 n_inlines: Cell::new(0),
@@ -413,10 +417,8 @@ pub fn new(tcx: TyCtxt<'b, 'tcx, 'tcx>,
             },
             check_overflow: check_overflow,
             check_drop_flag_for_sanity: check_drop_flag_for_sanity,
-            available_monomorphizations: RefCell::new(FnvHashSet()),
-            available_drop_glues: RefCell::new(FnvHashMap()),
             use_dll_storage_attrs: use_dll_storage_attrs,
-            translation_items: RefCell::new(FnvHashMap()),
+            translation_items: RefCell::new(FnvHashSet()),
             trait_cache: RefCell::new(DepTrackingMap::new(tcx.dep_graph.clone())),
         }
     }
@@ -479,7 +481,7 @@ pub fn get_mir(&self, def_id: DefId) -> Option<CachedMir<'b, 'tcx>> {
         }
     }
 
-    pub fn translation_items(&self) -> &RefCell<FnvHashMap<TransItem<'tcx>, TransItemState>> {
+    pub fn translation_items(&self) -> &RefCell<FnvHashSet<TransItem<'tcx>>> {
         &self.translation_items
     }
 
@@ -515,7 +517,8 @@ pub fn metadata_symbol_name(&self) -> String {
 
 impl<'tcx> LocalCrateContext<'tcx> {
     fn new<'a>(shared: &SharedCrateContext<'a, 'tcx>,
-               codegen_unit: CodegenUnit<'tcx>)
+               codegen_unit: CodegenUnit<'tcx>,
+               symbol_map: Rc<SymbolMap<'tcx>>)
            -> LocalCrateContext<'tcx> {
         unsafe {
             // Append ".rs" to LLVM module identifier.
@@ -574,6 +577,7 @@ fn new<'a>(shared: &SharedCrateContext<'a, 'tcx>,
                 intrinsics: RefCell::new(FnvHashMap()),
                 n_llvm_insns: Cell::new(0),
                 type_of_depth: Cell::new(0),
+                symbol_map: symbol_map,
             };
 
             let (int_type, opaque_vec_type, str_slice_ty, mut local_ccx) = {
@@ -730,7 +734,8 @@ pub fn fn_pointer_shims(&self) -> &RefCell<FnvHashMap<Ty<'tcx>, ValueRef>> {
         &self.local().fn_pointer_shims
     }
 
-    pub fn drop_glues<'a>(&'a self) -> &'a RefCell<FnvHashMap<DropGlueKind<'tcx>, ValueRef>> {
+    pub fn drop_glues<'a>(&'a self)
+                          -> &'a RefCell<FnvHashMap<DropGlueKind<'tcx>, (ValueRef, FnType)>> {
         &self.local().drop_glues
     }
 
@@ -816,14 +821,6 @@ pub fn stats<'a>(&'a self) -> &'a Stats {
         &self.shared.stats
     }
 
-    pub fn available_monomorphizations<'a>(&'a self) -> &'a RefCell<FnvHashSet<String>> {
-        &self.shared.available_monomorphizations
-    }
-
-    pub fn available_drop_glues(&self) -> &RefCell<FnvHashMap<DropGlueKind<'tcx>, String>> {
-        &self.shared.available_drop_glues
-    }
-
     pub fn int_type(&self) -> Type {
         self.local().int_type
     }
@@ -900,22 +897,12 @@ pub fn get_mir(&self, def_id: DefId) -> Option<CachedMir<'b, 'tcx>> {
         self.shared.get_mir(def_id)
     }
 
-    pub fn translation_items(&self) -> &RefCell<FnvHashMap<TransItem<'tcx>, TransItemState>> {
-        &self.shared.translation_items
+    pub fn symbol_map(&self) -> &SymbolMap<'tcx> {
+        &*self.local().symbol_map
     }
 
-    pub fn record_translation_item_as_generated(&self, cgi: TransItem<'tcx>) {
-        if self.sess().opts.debugging_opts.print_trans_items.is_none() {
-            return;
-        }
-
-        let mut codegen_items = self.translation_items().borrow_mut();
-
-        if codegen_items.contains_key(&cgi) {
-            codegen_items.insert(cgi, TransItemState::PredictedAndGenerated);
-        } else {
-            codegen_items.insert(cgi, TransItemState::NotPredictedButGenerated);
-        }
+    pub fn translation_items(&self) -> &RefCell<FnvHashSet<TransItem<'tcx>>> {
+        &self.shared.translation_items
     }
 
     /// Given the def-id of some item that has no type parameters, make
index e6db695943bbee5b5ae940255b31ed3e108f2c5b..2746d3fb6b0b6788e76397504e968b60d460491f 100644 (file)
@@ -138,24 +138,34 @@ pub fn define_global(ccx: &CrateContext, name: &str, ty: Type) -> Option<ValueRe
     }
 }
 
-
 /// Declare a Rust function with an intention to define it.
 ///
 /// Use this function when you intend to define a function. This function will
 /// panic if the name already has a definition associated with it. This
 /// can happen with #[no_mangle] or #[export_name], for example.
-pub fn define_internal_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
-                                    name: &str,
-                                    fn_type: ty::Ty<'tcx>) -> ValueRef {
+pub fn define_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
+                           name: &str,
+                           fn_type: ty::Ty<'tcx>) -> ValueRef {
     if get_defined_value(ccx, name).is_some() {
         ccx.sess().fatal(&format!("symbol `{}` already defined", name))
     } else {
-        let llfn = declare_fn(ccx, name, fn_type);
-        llvm::SetLinkage(llfn, llvm::InternalLinkage);
-        llfn
+        declare_fn(ccx, name, fn_type)
     }
 }
 
+/// Declare a Rust function with an intention to define it.
+///
+/// Use this function when you intend to define a function. This function will
+/// panic if the name already has a definition associated with it. This
+/// can happen with #[no_mangle] or #[export_name], for example.
+pub fn define_internal_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
+                                    name: &str,
+                                    fn_type: ty::Ty<'tcx>) -> ValueRef {
+    let llfn = define_fn(ccx, name, fn_type);
+    llvm::SetLinkage(llfn, llvm::InternalLinkage);
+    llfn
+}
+
 
 /// Get declared value by name.
 pub fn get_declared_value(ccx: &CrateContext, name: &str) -> Option<ValueRef> {
index d36878b03322a30fbe24c4c98c66bdaed168a242..f7f065a3562ed2bb8223a1741af97490884abd47 100644 (file)
 
 register_long_diagnostics! {
 
-E0510: r##"
-`return_address` was used in an invalid context. Erroneous code example:
-
-```ignore
-#![feature(intrinsics)]
-
-extern "rust-intrinsic" {
-    fn return_address() -> *const u8;
-}
-
-unsafe fn by_value() -> i32 {
-    let _ = return_address();
-    // error: invalid use of `return_address` intrinsic: function does
-    //        not use out pointer
-    0
-}
-```
-
-Return values may be stored in a return register(s) or written into a so-called
-out pointer. In case the returned value is too big (this is
-target-ABI-dependent and generally not portable or future proof) to fit into
-the return register(s), the compiler will return the value by writing it into
-space allocated in the caller's stack frame. Example:
-
-```
-#![feature(intrinsics)]
-
-extern "rust-intrinsic" {
-    fn return_address() -> *const u8;
-}
-
-unsafe fn by_pointer() -> String {
-    let _ = return_address();
-    String::new() // ok!
-}
-```
-"##,
-
 E0511: r##"
 Invalid monomorphization of an intrinsic function was used. Erroneous code
 example:
index 71c6cba9cc22a6866aa221368c44cf21950a94ff..b8dd7273a8331df4ac0c1fec8dbbe3572c25cf2e 100644 (file)
@@ -1695,11 +1695,13 @@ fn trans_scalar_binop<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
 }
 
 // refinement types would obviate the need for this
+#[derive(Clone, Copy)]
 enum lazy_binop_ty {
     lazy_and,
     lazy_or,
 }
 
+
 fn trans_lazy_binop<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                 binop_expr: &hir::Expr,
                                 op: lazy_binop_ty,
@@ -1717,6 +1719,17 @@ fn trans_lazy_binop<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
         return immediate_rvalue_bcx(past_lhs, lhs, binop_ty).to_expr_datumblock();
     }
 
+    // If the rhs can never be reached, don't generate code for it.
+    if let Some(cond_val) = const_to_opt_uint(lhs) {
+        match (cond_val, op) {
+            (0, lazy_and) |
+            (1, lazy_or)  => {
+                return immediate_rvalue_bcx(past_lhs, lhs, binop_ty).to_expr_datumblock();
+            }
+            _ => { /* continue */ }
+        }
+    }
+
     let join = fcx.new_id_block("join", binop_expr.id);
     let before_rhs = fcx.new_id_block("before_rhs", b.id);
 
index ac23d713d2727936f91b8cdc042b656363920960..ef7d0ea165d609d4487e50ece1e65a2869d61940 100644 (file)
 
 use std;
 
-use attributes;
-use back::symbol_names;
 use llvm;
 use llvm::{ValueRef, get_param};
 use middle::lang_items::ExchangeFreeFnLangItem;
 use rustc::ty::subst::{Substs};
 use rustc::traits;
 use rustc::ty::{self, Ty, TyCtxt, TypeFoldable};
-use abi::{Abi, FnType};
 use adt;
 use adt::GetDtorType; // for tcx.dtor_type()
 use base::*;
 use callee::{Callee, ArgVals};
 use cleanup;
 use cleanup::CleanupMethods;
-use collector;
 use common::*;
 use debuginfo::DebugLoc;
-use declare;
 use expr;
 use machine::*;
 use monomorphize;
@@ -236,48 +231,43 @@ pub fn map_ty<F>(&self, mut f: F) -> DropGlueKind<'tcx> where F: FnMut(Ty<'tcx>)
 
 fn get_drop_glue_core<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
                                 g: DropGlueKind<'tcx>) -> ValueRef {
-    debug!("make drop glue for {:?}", g);
     let g = g.map_ty(|t| get_drop_glue_type(ccx.tcx(), t));
-    debug!("drop glue type {:?}", g);
     match ccx.drop_glues().borrow().get(&g) {
-        Some(&glue) => return glue,
-        _ => { }
+        Some(&(glue, _)) => return glue,
+        None => {
+            debug!("Could not find drop glue for {:?} -- {} -- {}. \
+                    Falling back to on-demand instantiation.",
+                    g,
+                    TransItem::DropGlue(g).to_raw_string(),
+                    ccx.codegen_unit().name);
+
+            ccx.stats().n_fallback_instantiations.set(ccx.stats()
+                                                         .n_fallback_instantiations
+                                                         .get() + 1);
+        }
     }
-    let t = g.ty();
 
-    let tcx = ccx.tcx();
-    let sig = ty::FnSig {
-        inputs: vec![tcx.mk_mut_ptr(tcx.types.i8)],
-        output: ty::FnOutput::FnConverging(tcx.mk_nil()),
-        variadic: false,
-    };
-    // Create a FnType for fn(*mut i8) and substitute the real type in
-    // later - that prevents FnType from splitting fat pointers up.
-    let mut fn_ty = FnType::new(ccx, Abi::Rust, &sig, &[]);
-    fn_ty.args[0].original_ty = type_of(ccx, t).ptr_to();
-    let llfnty = fn_ty.llvm_type(ccx);
-
-    // To avoid infinite recursion, don't `make_drop_glue` until after we've
-    // added the entry to the `drop_glues` cache.
-    if let Some(old_sym) = ccx.available_drop_glues().borrow().get(&g) {
-        let llfn = declare::declare_cfn(ccx, &old_sym, llfnty);
-        ccx.drop_glues().borrow_mut().insert(g, llfn);
-        return llfn;
-    };
+    // FIXME: #34151
+    // Normally, getting here would indicate a bug in trans::collector,
+    // since it seems to have missed a translation item. When we are
+    // translating with non-MIR-based trans, however, the results of the
+    // collector are not entirely reliable since it bases its analysis
+    // on MIR. Thus, we'll instantiate the missing function on demand in
+    // this codegen unit, so that things keep working.
 
-    let suffix = match g {
-        DropGlueKind::Ty(_) => "drop",
-        DropGlueKind::TyContents(_) => "drop_contents",
-    };
+    TransItem::DropGlue(g).predefine(ccx, llvm::InternalLinkage);
+    TransItem::DropGlue(g).define(ccx);
 
-    let fn_nm = symbol_names::internal_name_from_type_and_suffix(ccx, t, suffix);
-    assert!(declare::get_defined_value(ccx, &fn_nm).is_none());
-    let llfn = declare::declare_cfn(ccx, &fn_nm, llfnty);
-    attributes::set_frame_pointer_elimination(ccx, llfn);
-    ccx.available_drop_glues().borrow_mut().insert(g, fn_nm);
-    ccx.drop_glues().borrow_mut().insert(g, llfn);
+    // Now that we made sure that the glue function is in ccx.drop_glues,
+    // give it another try
+    get_drop_glue_core(ccx, g)
+}
 
-    let _s = StatRecorder::new(ccx, format!("drop {:?}", t));
+pub fn implement_drop_glue<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
+                                     g: DropGlueKind<'tcx>) {
+    let tcx = ccx.tcx();
+    assert_eq!(g.ty(), get_drop_glue_type(tcx, g.ty()));
+    let (llfn, fn_ty) = ccx.drop_glues().borrow().get(&g).unwrap().clone();
 
     let (arena, fcx): (TypedArena<_>, FunctionContext);
     arena = TypedArena::new();
@@ -285,8 +275,6 @@ fn get_drop_glue_core<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
 
     let bcx = fcx.init(false, None);
 
-    update_linkage(ccx, llfn, None, OriginalTranslation);
-
     ccx.stats().n_glues_created.set(ccx.stats().n_glues_created.get() + 1);
     // All glue functions take values passed *by alias*; this is a
     // requirement since in many contexts glue is invoked indirectly and
@@ -298,10 +286,9 @@ fn get_drop_glue_core<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
 
     let bcx = make_drop_glue(bcx, get_param(llfn, 0), g);
     fcx.finish(bcx, DebugLoc::None);
-
-    llfn
 }
 
+
 fn trans_struct_drop_flag<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
                                       t: Ty<'tcx>,
                                       struct_data: ValueRef)
@@ -494,11 +481,6 @@ pub fn size_and_align_of_dst<'blk, 'tcx>(bcx: &BlockAndBuilder<'blk, 'tcx>,
 
 fn make_drop_glue<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, v0: ValueRef, g: DropGlueKind<'tcx>)
                               -> Block<'blk, 'tcx> {
-    if collector::collecting_debug_information(bcx.ccx().shared()) {
-        bcx.ccx()
-           .record_translation_item_as_generated(TransItem::DropGlue(g));
-    }
-
     let t = g.ty();
 
     let skip_dtor = match g { DropGlueKind::Ty(_) => false, DropGlueKind::TyContents(_) => true };
index af175fbf88256eecb57cebe6f0863d909d94cc0f..4077b894d62d4456a76dc16f1baf83ab83b78f16 100644 (file)
@@ -8,13 +8,11 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use llvm::{AvailableExternallyLinkage, InternalLinkage, SetLinkage};
 use middle::cstore::{FoundAst, InlinedItem};
 use rustc::hir::def_id::DefId;
-use rustc::ty::subst::Substs;
-use base::{push_ctxt, trans_item, trans_fn};
-use callee::Callee;
+use base::push_ctxt;
 use common::*;
+use monomorphize::Instance;
 
 use rustc::dep_graph::DepNode;
 use rustc::hir;
@@ -52,30 +50,6 @@ fn instantiate_inline(ccx: &CrateContext, fn_id: DefId) -> Option<DefId> {
             ccx.external_srcs().borrow_mut().insert(item.id, fn_id);
 
             ccx.stats().n_inlines.set(ccx.stats().n_inlines.get() + 1);
-            trans_item(ccx, item);
-
-            if let hir::ItemFn(_, _, _, _, ref generics, _) = item.node {
-                // Generics have no symbol, so they can't be given any linkage.
-                if !generics.is_type_parameterized() {
-                    let linkage = if ccx.sess().opts.cg.codegen_units == 1 {
-                        // We could use AvailableExternallyLinkage here,
-                        // but InternalLinkage allows LLVM to optimize more
-                        // aggressively (at the cost of sometimes
-                        // duplicating code).
-                        InternalLinkage
-                    } else {
-                        // With multiple compilation units, duplicated code
-                        // is more of a problem.  Also, `codegen_units > 1`
-                        // means the user is okay with losing some
-                        // performance.
-                        AvailableExternallyLinkage
-                    };
-                    let empty_substs = tcx.mk_substs(Substs::empty());
-                    let def_id = tcx.map.local_def_id(item.id);
-                    let llfn = Callee::def(ccx, def_id, empty_substs).reify(ccx).val;
-                    SetLinkage(llfn, linkage);
-                }
-            }
 
             item.id
         }
@@ -135,35 +109,12 @@ fn instantiate_inline(ccx: &CrateContext, fn_id: DefId) -> Option<DefId> {
             // don't.
             trait_item.id
         }
-        FoundAst::Found(&InlinedItem::ImplItem(impl_did, ref impl_item)) => {
+        FoundAst::Found(&InlinedItem::ImplItem(_, ref impl_item)) => {
             ccx.external().borrow_mut().insert(fn_id, Some(impl_item.id));
             ccx.external_srcs().borrow_mut().insert(impl_item.id, fn_id);
 
             ccx.stats().n_inlines.set(ccx.stats().n_inlines.get() + 1);
 
-            // Translate monomorphic impl methods immediately.
-            if let hir::ImplItemKind::Method(ref sig, ref body) = impl_item.node {
-                let impl_tpt = tcx.lookup_item_type(impl_did);
-                if impl_tpt.generics.types.is_empty() &&
-                        sig.generics.ty_params.is_empty() {
-                    let def_id = tcx.map.local_def_id(impl_item.id);
-                    let empty_substs = ccx.empty_substs_for_def_id(def_id);
-                    let llfn = Callee::def(ccx, def_id, empty_substs).reify(ccx).val;
-                    trans_fn(ccx,
-                             &sig.decl,
-                             body,
-                             llfn,
-                             empty_substs,
-                             impl_item.id);
-                    // See linkage comments on items.
-                    if ccx.sess().opts.cg.codegen_units == 1 {
-                        SetLinkage(llfn, InternalLinkage);
-                    } else {
-                        SetLinkage(llfn, AvailableExternallyLinkage);
-                    }
-                }
-            }
-
             impl_item.id
         }
     };
@@ -184,3 +135,12 @@ pub fn get_local_instance(ccx: &CrateContext, fn_id: DefId)
 pub fn maybe_instantiate_inline(ccx: &CrateContext, fn_id: DefId) -> DefId {
     get_local_instance(ccx, fn_id).unwrap_or(fn_id)
 }
+
+pub fn maybe_inline_instance<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
+                                       instance: Instance<'tcx>) -> Instance<'tcx> {
+    let def_id = maybe_instantiate_inline(ccx, instance.def);
+    Instance {
+        def: def_id,
+        substs: instance.substs
+    }
+}
index bd24647edf00b4b0f349c7e60f90bbab479a2dbe..a721361fce0e3aecbbd152edb8fd663507383df3 100644 (file)
@@ -617,18 +617,6 @@ pub fn trans_intrinsic_call<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
 
         },
 
-
-        (_, "return_address") => {
-            if !fcx.fn_ty.ret.is_indirect() {
-                span_err!(tcx.sess, span, E0510,
-                          "invalid use of `return_address` intrinsic: function \
-                           does not use out pointer");
-                C_null(Type::i8p(ccx))
-            } else {
-                PointerCast(bcx, llvm::get_param(fcx.llfn, 0), Type::i8p(ccx))
-            }
-        }
-
         (_, "discriminant_value") => {
             let val_ty = substs.types.get(FnSpace, 0);
             match val_ty.sty {
index 9cb5d8b6ad62a2e43781bb64b3b7806f5fabe425..fa0a1fdc37523dbff9f012d6a0bb33f82baf93a1 100644 (file)
@@ -122,6 +122,7 @@ pub mod back {
 mod mir;
 mod monomorphize;
 mod partitioning;
+mod symbol_map;
 mod symbol_names_test;
 mod trans_item;
 mod tvec;
@@ -129,8 +130,9 @@ pub mod back {
 mod type_of;
 mod value;
 
-#[derive(Copy, Clone)]
+#[derive(Clone)]
 pub struct ModuleTranslation {
+    pub name: String,
     pub llcx: llvm::ContextRef,
     pub llmod: llvm::ModuleRef,
 }
index 446ac91b1f58086d990aea8eeb3154542a0341de..270033be9375c20029f763de31b8da3816bc930d 100644 (file)
@@ -197,10 +197,13 @@ pub fn trans_consume(&mut self,
                         (OperandValue::Pair(a, b),
                          &mir::ProjectionElem::Field(ref f, ty)) => {
                             let llval = [a, b][f.index()];
-                            return OperandRef {
+                            let op = OperandRef {
                                 val: OperandValue::Immediate(llval),
                                 ty: bcx.monomorphize(&ty)
                             };
+
+                            // Handle nested pairs.
+                            return op.unpack_if_pair(bcx);
                         }
                         _ => {}
                     }
index ab859b88a85972d55200ba32260199974fa94422..00c0e91103500d21a26f02eff544778948f2befd 100644 (file)
@@ -17,7 +17,6 @@
 use rustc::ty::{self, Ty, TypeFoldable, TyCtxt};
 use attributes;
 use base::{push_ctxt};
-use base::trans_fn;
 use base;
 use common::*;
 use declare;
 
 use rustc::hir;
 
-use syntax::attr;
 use errors;
 
 use std::fmt;
+use trans_item::TransItem;
 
 pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
                                 fn_id: DefId,
                                 psubsts: &'tcx subst::Substs<'tcx>)
                                 -> (ValueRef, Ty<'tcx>) {
     debug!("monomorphic_fn(fn_id={:?}, real_substs={:?})", fn_id, psubsts);
-
     assert!(!psubsts.types.needs_infer() && !psubsts.types.has_param_types());
 
     let _icx = push_ctxt("monomorphic_fn");
@@ -53,6 +51,8 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
     if let Some(&val) = ccx.instances().borrow().get(&instance) {
         debug!("leaving monomorphic fn {:?}", instance);
         return (val, mono_ty);
+    } else {
+        assert!(!ccx.codegen_unit().items.contains_key(&TransItem::Fn(instance)));
     }
 
     debug!("monomorphic_fn({:?})", instance);
@@ -84,9 +84,10 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
         monomorphizing.insert(fn_id, depth + 1);
     }
 
-    let symbol = instance.symbol_name(ccx.shared());
+    let symbol = ccx.symbol_map().get_or_compute(ccx.shared(),
+                                                 TransItem::Fn(instance));
 
-    debug!("monomorphize_fn mangled to {}", symbol);
+    debug!("monomorphize_fn mangled to {}", &symbol);
     assert!(declare::get_defined_value(ccx, &symbol).is_none());
 
     // FIXME(nagisa): perhaps needs a more fine grained selection?
@@ -109,33 +110,35 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
         });
     match map_node {
         hir_map::NodeItem(&hir::Item {
-            ref attrs, node: hir::ItemFn(ref decl, _, _, _, _, ref body), ..
-        }) |
-        hir_map::NodeTraitItem(&hir::TraitItem {
-            ref attrs, node: hir::MethodTraitItem(
-                hir::MethodSig { ref decl, .. }, Some(ref body)), ..
+            ref attrs,
+            node: hir::ItemFn(..), ..
         }) |
         hir_map::NodeImplItem(&hir::ImplItem {
             ref attrs, node: hir::ImplItemKind::Method(
-                hir::MethodSig { ref decl, .. }, ref body), ..
+                hir::MethodSig { .. }, _), ..
+        }) |
+        hir_map::NodeTraitItem(&hir::TraitItem {
+            ref attrs, node: hir::MethodTraitItem(
+                hir::MethodSig { .. }, Some(_)), ..
         }) => {
-            attributes::from_fn_attrs(ccx, attrs, lldecl);
-
-            let is_first = !ccx.available_monomorphizations().borrow()
-                                                             .contains(&symbol);
-            if is_first {
-                ccx.available_monomorphizations().borrow_mut().insert(symbol.clone());
-            }
+            let trans_item = TransItem::Fn(instance);
 
-            let trans_everywhere = attr::requests_inline(attrs);
-            if trans_everywhere || is_first {
-                let origin = if is_first { base::OriginalTranslation } else { base::InlinedCopy };
-                base::update_linkage(ccx, lldecl, None, origin);
-                trans_fn(ccx, decl, body, lldecl, psubsts, fn_node_id);
-            } else {
-                // We marked the value as using internal linkage earlier, but that is illegal for
-                // declarations, so switch back to external linkage.
+            if ccx.shared().translation_items().borrow().contains(&trans_item) {
+                attributes::from_fn_attrs(ccx, attrs, lldecl);
                 llvm::SetLinkage(lldecl, llvm::ExternalLinkage);
+            } else {
+                // FIXME: #34151
+                // Normally, getting here would indicate a bug in trans::collector,
+                // since it seems to have missed a translation item. When we are
+                // translating with non-MIR based trans, however, the results of
+                // the collector are not entirely reliable since it bases its
+                // analysis on MIR. Thus, we'll instantiate the missing function
+                // privately in this codegen unit, so that things keep working.
+                ccx.stats().n_fallback_instantiations.set(ccx.stats()
+                                                             .n_fallback_instantiations
+                                                             .get() + 1);
+                trans_item.predefine(ccx, llvm::InternalLinkage);
+                trans_item.define(ccx);
             }
         }
 
index 2ded643ef4fdd309da56c954925557c879a8f2b1..8073359ede87ebc058689e317e484ac862390aac 100644 (file)
 use monomorphize;
 use rustc::hir::def_id::DefId;
 use rustc::hir::map::DefPathData;
+use rustc::session::config::NUMBERED_CODEGEN_UNIT_MARKER;
 use rustc::ty::TyCtxt;
 use rustc::ty::item_path::characteristic_def_id_of_type;
+use std::cmp::Ordering;
+use symbol_map::SymbolMap;
+use syntax::ast::NodeId;
 use syntax::parse::token::{self, InternedString};
 use trans_item::TransItem;
-use util::nodemap::{FnvHashMap, FnvHashSet};
-
-pub struct CodegenUnit<'tcx> {
-    pub name: InternedString,
-    pub items: FnvHashMap<TransItem<'tcx>, llvm::Linkage>,
-}
+use util::nodemap::{FnvHashMap, FnvHashSet, NodeSet};
 
 pub enum PartitioningStrategy {
     /// Generate one codegen unit per source-level module.
@@ -140,25 +139,95 @@ pub enum PartitioningStrategy {
     FixedUnitCount(usize)
 }
 
+pub struct CodegenUnit<'tcx> {
+    pub name: InternedString,
+    pub items: FnvHashMap<TransItem<'tcx>, llvm::Linkage>,
+}
+
+impl<'tcx> CodegenUnit<'tcx> {
+    pub fn items_in_deterministic_order(&self,
+                                        tcx: TyCtxt,
+                                        symbol_map: &SymbolMap)
+                                        -> Vec<(TransItem<'tcx>, llvm::Linkage)> {
+        let mut items: Vec<(TransItem<'tcx>, llvm::Linkage)> =
+            self.items.iter().map(|(item, linkage)| (*item, *linkage)).collect();
+
+        // The codegen tests rely on items being processed in the same order as
+        // they appear in the file, so for local items, we sort by node_id first
+        items.sort_by(|&(trans_item1, _), &(trans_item2, _)| {
+            let node_id1 = local_node_id(tcx, trans_item1);
+            let node_id2 = local_node_id(tcx, trans_item2);
+
+            match (node_id1, node_id2) {
+                (None, None) => {
+                    let symbol_name1 = symbol_map.get(trans_item1).unwrap();
+                    let symbol_name2 = symbol_map.get(trans_item2).unwrap();
+                    symbol_name1.cmp(symbol_name2)
+                }
+                // In the following two cases we can avoid looking up the symbol
+                (None, Some(_)) => Ordering::Less,
+                (Some(_), None) => Ordering::Greater,
+                (Some(node_id1), Some(node_id2)) => {
+                    let ordering = node_id1.cmp(&node_id2);
+
+                    if ordering != Ordering::Equal {
+                        return ordering;
+                    }
+
+                    let symbol_name1 = symbol_map.get(trans_item1).unwrap();
+                    let symbol_name2 = symbol_map.get(trans_item2).unwrap();
+                    symbol_name1.cmp(symbol_name2)
+                }
+            }
+        });
+
+        return items;
+
+        fn local_node_id(tcx: TyCtxt, trans_item: TransItem) -> Option<NodeId> {
+            match trans_item {
+                TransItem::Fn(instance) => {
+                    tcx.map.as_local_node_id(instance.def)
+                }
+                TransItem::Static(node_id) => Some(node_id),
+                TransItem::DropGlue(_) => None,
+            }
+        }
+    }
+}
+
+
 // Anything we can't find a proper codegen unit for goes into this.
 const FALLBACK_CODEGEN_UNIT: &'static str = "__rustc_fallback_codegen_unit";
 
 pub fn partition<'a, 'tcx, I>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                               trans_items: I,
                               strategy: PartitioningStrategy,
-                              inlining_map: &InliningMap<'tcx>)
+                              inlining_map: &InliningMap<'tcx>,
+                              reachable: &NodeSet)
                               -> Vec<CodegenUnit<'tcx>>
     where I: Iterator<Item = TransItem<'tcx>>
 {
+    if let PartitioningStrategy::FixedUnitCount(1) = strategy {
+        // If there is only a single codegen-unit, we can use a very simple
+        // scheme and don't have to bother with doing much analysis.
+        return vec![single_codegen_unit(tcx, trans_items, reachable)];
+    }
+
     // In the first step, we place all regular translation items into their
     // respective 'home' codegen unit. Regular translation items are all
     // functions and statics defined in the local crate.
-    let mut initial_partitioning = place_root_translation_items(tcx, trans_items);
+    let mut initial_partitioning = place_root_translation_items(tcx,
+                                                                trans_items,
+                                                                reachable);
+
+    debug_dump(tcx, "INITIAL PARTITONING:", initial_partitioning.codegen_units.iter());
 
     // If the partitioning should produce a fixed count of codegen units, merge
     // until that count is reached.
     if let PartitioningStrategy::FixedUnitCount(count) = strategy {
         merge_codegen_units(&mut initial_partitioning, count, &tcx.crate_name[..]);
+
+        debug_dump(tcx, "POST MERGING:", initial_partitioning.codegen_units.iter());
     }
 
     // In the next step, we use the inlining map to determine which additional
@@ -167,7 +236,16 @@ pub fn partition<'a, 'tcx, I>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
     // local functions the definition of which is marked with #[inline].
     let post_inlining = place_inlined_translation_items(initial_partitioning,
                                                         inlining_map);
-    post_inlining.0
+
+    debug_dump(tcx, "POST INLINING:", post_inlining.0.iter());
+
+    // Finally, sort by codegen unit name, so that we get deterministic results
+    let mut result = post_inlining.0;
+    result.sort_by(|cgu1, cgu2| {
+        (&cgu1.name[..]).cmp(&cgu2.name[..])
+    });
+
+    result
 }
 
 struct PreInliningPartitioning<'tcx> {
@@ -178,7 +256,8 @@ struct PreInliningPartitioning<'tcx> {
 struct PostInliningPartitioning<'tcx>(Vec<CodegenUnit<'tcx>>);
 
 fn place_root_translation_items<'a, 'tcx, I>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                                             trans_items: I)
+                                             trans_items: I,
+                                             _reachable: &NodeSet)
                                              -> PreInliningPartitioning<'tcx>
     where I: Iterator<Item = TransItem<'tcx>>
 {
@@ -186,15 +265,11 @@ fn place_root_translation_items<'a, 'tcx, I>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
     let mut codegen_units = FnvHashMap();
 
     for trans_item in trans_items {
-        let is_root = match trans_item {
-            TransItem::Static(..) => true,
-            TransItem::DropGlue(..) => false,
-            TransItem::Fn(_) => !trans_item.is_from_extern_crate(),
-        };
+        let is_root = !trans_item.is_instantiated_only_on_demand();
 
         if is_root {
             let characteristic_def_id = characteristic_def_id_of_trans_item(tcx, trans_item);
-            let is_volatile = trans_item.is_lazily_instantiated();
+            let is_volatile = trans_item.is_generic_fn();
 
             let codegen_unit_name = match characteristic_def_id {
                 Some(def_id) => compute_codegen_unit_name(tcx, def_id, is_volatile),
@@ -218,7 +293,18 @@ fn place_root_translation_items<'a, 'tcx, I>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                         TransItem::Static(..) => llvm::ExternalLinkage,
                         TransItem::DropGlue(..) => unreachable!(),
                         // Is there any benefit to using ExternalLinkage?:
-                        TransItem::Fn(..) => llvm::WeakODRLinkage,
+                        TransItem::Fn(ref instance) => {
+                            if instance.substs.types.is_empty() {
+                                // This is a non-generic function; we always
+                                // make it visible externally on the chance that
+                                // it might be used in another codegen unit.
+                                llvm::ExternalLinkage
+                            } else {
+                                // In the current setup, generic functions cannot
+                                // be roots.
+                                unreachable!()
+                            }
+                        }
                     }
                 }
             };
@@ -258,7 +344,7 @@ fn merge_codegen_units<'tcx>(initial_partitioning: &mut PreInliningPartitioning<
     // translation items in a given unit. This could be improved on.
     while codegen_units.len() > target_cgu_count {
         // Sort small cgus to the back
-        codegen_units.as_mut_slice().sort_by_key(|cgu| -(cgu.items.len() as i64));
+        codegen_units.sort_by_key(|cgu| -(cgu.items.len() as i64));
         let smallest = codegen_units.pop().unwrap();
         let second_smallest = codegen_units.last_mut().unwrap();
 
@@ -281,10 +367,6 @@ fn merge_codegen_units<'tcx>(initial_partitioning: &mut PreInliningPartitioning<
             items: FnvHashMap()
         });
     }
-
-    fn numbered_codegen_unit_name(crate_name: &str, index: usize) -> InternedString {
-        token::intern_and_get_ident(&format!("{}.{}", crate_name, index)[..])
-    }
 }
 
 fn place_inlined_translation_items<'tcx>(initial_partitioning: PreInliningPartitioning<'tcx>,
@@ -309,20 +391,30 @@ fn place_inlined_translation_items<'tcx>(initial_partitioning: PreInliningPartit
             if let Some(linkage) = codegen_unit.items.get(&trans_item) {
                 // This is a root, just copy it over
                 new_codegen_unit.items.insert(trans_item, *linkage);
+            } else if initial_partitioning.roots.contains(&trans_item) {
+                // This item will be instantiated in some other codegen unit,
+                // so we just add it here with AvailableExternallyLinkage
+                // FIXME(mw): I have not seen it happening yet but having
+                //            available_externally here could potentially lead
+                //            to the same problem with exception handling tables
+                //            as in the case below.
+                new_codegen_unit.items.insert(trans_item,
+                                              llvm::AvailableExternallyLinkage);
+            } else if trans_item.is_from_extern_crate() && !trans_item.is_generic_fn() {
+                // FIXME(mw): It would be nice if we could mark these as
+                // `AvailableExternallyLinkage`, since they should have
+                // been instantiated in the extern crate. But this
+                // sometimes leads to crashes on Windows because LLVM
+                // does not handle exception handling table instantiation
+                // reliably in that case.
+                new_codegen_unit.items.insert(trans_item, llvm::InternalLinkage);
             } else {
-                if initial_partitioning.roots.contains(&trans_item) {
-                    // This item will be instantiated in some other codegen unit,
-                    // so we just add it here with AvailableExternallyLinkage
-                    new_codegen_unit.items.insert(trans_item,
-                                                  llvm::AvailableExternallyLinkage);
-                } else {
-                    // We can't be sure if this will also be instantiated
-                    // somewhere else, so we add an instance here with
-                    // LinkOnceODRLinkage. That way the item can be discarded if
-                    // it's not needed (inlined) after all.
-                    new_codegen_unit.items.insert(trans_item,
-                                                  llvm::LinkOnceODRLinkage);
-                }
+                assert!(trans_item.is_instantiated_only_on_demand());
+                // We can't be sure if this will also be instantiated
+                // somewhere else, so we add an instance here with
+                // InternalLinkage so we don't get any conflicts.
+                new_codegen_unit.items.insert(trans_item,
+                                              llvm::InternalLinkage);
             }
         }
 
@@ -410,3 +502,93 @@ fn compute_codegen_unit_name<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
 
     return token::intern_and_get_ident(&mod_path[..]);
 }
+
+fn single_codegen_unit<'a, 'tcx, I>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+                                    trans_items: I,
+                                    reachable: &NodeSet)
+                                    -> CodegenUnit<'tcx>
+    where I: Iterator<Item = TransItem<'tcx>>
+{
+    let mut items = FnvHashMap();
+
+    for trans_item in trans_items {
+        let linkage = trans_item.explicit_linkage(tcx).unwrap_or_else(|| {
+            match trans_item {
+                TransItem::Static(node_id) => {
+                    if reachable.contains(&node_id) {
+                        llvm::ExternalLinkage
+                    } else {
+                        llvm::PrivateLinkage
+                    }
+                }
+                TransItem::DropGlue(_) => {
+                    llvm::InternalLinkage
+                }
+                TransItem::Fn(instance) => {
+                    if trans_item.is_generic_fn() {
+                        // FIXME(mw): Assigning internal linkage to all
+                        // monomorphizations is potentially a waste of space
+                        // since monomorphizations could be shared between
+                        // crates. The main reason for making them internal is
+                        // a limitation in MingW's binutils that cannot deal
+                        // with COFF object that have more than 2^15 sections,
+                        // which is something that can happen for large programs
+                        // when every function gets put into its own COMDAT
+                        // section.
+                        llvm::InternalLinkage
+                    } else if trans_item.is_from_extern_crate() {
+                        // FIXME(mw): It would be nice if we could mark these as
+                        // `AvailableExternallyLinkage`, since they should have
+                        // been instantiated in the extern crate. But this
+                        // sometimes leads to crashes on Windows because LLVM
+                        // does not handle exception handling table instantiation
+                        // reliably in that case.
+                        llvm::InternalLinkage
+                    } else if reachable.contains(&tcx.map
+                                                     .as_local_node_id(instance.def)
+                                                     .unwrap()) {
+                        llvm::ExternalLinkage
+                    } else {
+                        // Functions that are not visible outside this crate can
+                        // be marked as internal.
+                        llvm::InternalLinkage
+                    }
+                }
+            }
+        });
+
+        items.insert(trans_item, linkage);
+    }
+
+    CodegenUnit {
+        name: numbered_codegen_unit_name(&tcx.crate_name[..], 0),
+        items: items
+    }
+}
+
+fn numbered_codegen_unit_name(crate_name: &str, index: usize) -> InternedString {
+    token::intern_and_get_ident(&format!("{}{}{}",
+        crate_name,
+        NUMBERED_CODEGEN_UNIT_MARKER,
+        index)[..])
+}
+
+fn debug_dump<'a, 'b, 'tcx, I>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+                               label: &str,
+                               cgus: I)
+    where I: Iterator<Item=&'b CodegenUnit<'tcx>>,
+          'tcx: 'a + 'b
+{
+    if cfg!(debug_assertions) {
+        debug!("{}", label);
+        for cgu in cgus {
+            debug!("CodegenUnit {}:", cgu.name);
+
+            for (trans_item, linkage) in &cgu.items {
+                debug!(" - {} [{:?}]", trans_item.to_string(tcx), linkage);
+            }
+
+            debug!("");
+        }
+    }
+}
diff --git a/src/librustc_trans/symbol_map.rs b/src/librustc_trans/symbol_map.rs
new file mode 100644 (file)
index 0000000..3faaa08
--- /dev/null
@@ -0,0 +1,128 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use context::SharedCrateContext;
+use monomorphize::Instance;
+use rustc::ty::TyCtxt;
+use std::borrow::Cow;
+use syntax::codemap::Span;
+use trans_item::TransItem;
+use util::nodemap::FnvHashMap;
+
+// In the SymbolMap we collect the symbol names of all translation items of
+// the current crate. This map exists as a performance optimization. Symbol
+// names of translation items are deterministic and fully defined by the item.
+// Thus they could also always be recomputed if needed.
+
+pub struct SymbolMap<'tcx> {
+    index: FnvHashMap<TransItem<'tcx>, (usize, usize)>,
+    arena: String,
+}
+
+impl<'tcx> SymbolMap<'tcx> {
+
+    pub fn build<'a, I>(scx: &SharedCrateContext<'a, 'tcx>,
+                        trans_items: I)
+                        -> SymbolMap<'tcx>
+        where I: Iterator<Item=TransItem<'tcx>>
+    {
+        // Check for duplicate symbol names
+        let mut symbols: Vec<_> = trans_items.map(|trans_item| {
+            (trans_item, trans_item.compute_symbol_name(scx))
+        }).collect();
+
+        (&mut symbols[..]).sort_by(|&(_, ref sym1), &(_, ref sym2)|{
+            sym1.cmp(sym2)
+        });
+
+        for pair in (&symbols[..]).windows(2) {
+            let sym1 = &pair[0].1;
+            let sym2 = &pair[1].1;
+
+            if *sym1 == *sym2 {
+                let trans_item1 = pair[0].0;
+                let trans_item2 = pair[1].0;
+
+                let span1 = get_span(scx.tcx(), trans_item1);
+                let span2 = get_span(scx.tcx(), trans_item2);
+
+                // Deterministically select one of the spans for error reporting
+                let span = match (span1, span2) {
+                    (Some(span1), Some(span2)) => {
+                        Some(if span1.lo.0 > span2.lo.0 {
+                            span1
+                        } else {
+                            span2
+                        })
+                    }
+                    (Some(span), None) |
+                    (None, Some(span)) => Some(span),
+                    _ => None
+                };
+
+                let error_message = format!("symbol `{}` is already defined", sym1);
+
+                if let Some(span) = span {
+                    scx.sess().span_fatal(span, &error_message)
+                } else {
+                    scx.sess().fatal(&error_message)
+                }
+            }
+        }
+
+        let mut symbol_map = SymbolMap {
+            index: FnvHashMap(),
+            arena: String::with_capacity(1024),
+        };
+
+        for (trans_item, symbol) in symbols {
+            let start_index = symbol_map.arena.len();
+            symbol_map.arena.push_str(&symbol[..]);
+            let end_index = symbol_map.arena.len();
+            let prev_entry = symbol_map.index.insert(trans_item,
+                                                     (start_index, end_index));
+            if prev_entry.is_some() {
+                bug!("TransItem encountered twice?")
+            }
+        }
+
+        fn get_span<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+                              trans_item: TransItem<'tcx>) -> Option<Span> {
+            match trans_item {
+                TransItem::Fn(Instance { def, .. }) => {
+                    tcx.map.as_local_node_id(def)
+                }
+                TransItem::Static(node_id) => Some(node_id),
+                TransItem::DropGlue(_) => None,
+            }.map(|node_id| {
+                tcx.map.span(node_id)
+            })
+        }
+
+        symbol_map
+    }
+
+    pub fn get(&self, trans_item: TransItem<'tcx>) -> Option<&str> {
+        self.index.get(&trans_item).map(|&(start_index, end_index)| {
+            &self.arena[start_index .. end_index]
+        })
+    }
+
+    pub fn get_or_compute<'map, 'scx>(&'map self,
+                                      scx: &SharedCrateContext<'scx, 'tcx>,
+                                      trans_item: TransItem<'tcx>)
+                                      -> Cow<'map, str> {
+        if let Some(sym) = self.get(trans_item) {
+            Cow::from(sym)
+        } else {
+            Cow::from(trans_item.compute_symbol_name(scx))
+        }
+    }
+}
index 11e9e9f3204a28a3b9e0860e3434596a9dbfcf61..9a7fe54e0d9f5a9f3dc55cfb96e9efc8af1294e2 100644 (file)
 use syntax::ast;
 use syntax::attr::AttrMetaMethods;
 
-use common::CrateContext;
+use common::SharedCrateContext;
 use monomorphize::Instance;
 
 const SYMBOL_NAME: &'static str = "rustc_symbol_name";
 const ITEM_PATH: &'static str = "rustc_item_path";
 
-pub fn report_symbol_names(ccx: &CrateContext) {
+pub fn report_symbol_names(scx: &SharedCrateContext) {
     // if the `rustc_attrs` feature is not enabled, then the
     // attributes we are interested in cannot be present anyway, so
     // skip the walk.
-    let tcx = ccx.tcx();
+    let tcx = scx.tcx();
     if !tcx.sess.features.borrow().rustc_attrs {
         return;
     }
 
     let _ignore = tcx.dep_graph.in_ignore();
-    let mut visitor = SymbolNamesTest { ccx: ccx };
+    let mut visitor = SymbolNamesTest { scx: scx };
     tcx.map.krate().visit_all_items(&mut visitor);
 }
 
 struct SymbolNamesTest<'a, 'tcx:'a> {
-    ccx: &'a CrateContext<'a, 'tcx>,
+    scx: &'a SharedCrateContext<'a, 'tcx>,
 }
 
 impl<'a, 'tcx> SymbolNamesTest<'a, 'tcx> {
     fn process_attrs(&mut self,
                      node_id: ast::NodeId) {
-        let tcx = self.ccx.tcx();
+        let tcx = self.scx.tcx();
         let def_id = tcx.map.local_def_id(node_id);
         for attr in tcx.get_attrs(def_id).iter() {
             if attr.check_name(SYMBOL_NAME) {
                 // for now, can only use on monomorphic names
-                let instance = Instance::mono(self.ccx.shared(), def_id);
-                let name = instance.symbol_name(self.ccx.shared());
+                let instance = Instance::mono(self.scx, def_id);
+                let name = instance.symbol_name(self.scx);
                 tcx.sess.span_err(attr.span, &format!("symbol-name({})", name));
             } else if attr.check_name(ITEM_PATH) {
                 let path = tcx.item_path_str(def_id);
index d7c5c41a156ba4c239780301a87881fc42087953..b7b18b2631bee91f5148405b28713265fa0b9758 100644 (file)
 //! item-path. This is used for unit testing the code that generates
 //! paths etc in all kinds of annoying scenarios.
 
-use base::llvm_linkage_by_name;
+use attributes;
+use base;
+use consts;
+use context::{CrateContext, SharedCrateContext};
+use declare;
 use glue::DropGlueKind;
 use llvm;
-use monomorphize::Instance;
+use monomorphize::{self, Instance};
+use inline;
 use rustc::hir;
+use rustc::hir::map as hir_map;
 use rustc::hir::def_id::DefId;
-use rustc::ty::{self, Ty, TyCtxt};
+use rustc::ty::{self, Ty, TyCtxt, TypeFoldable};
 use rustc::ty::subst;
 use std::hash::{Hash, Hasher};
 use syntax::ast::{self, NodeId};
-use syntax::attr;
+use syntax::{attr,errors};
 use syntax::parse::token;
+use type_of;
+use glue;
+use abi::{Abi, FnType};
+use back::symbol_names;
 
 #[derive(PartialEq, Eq, Clone, Copy, Debug)]
 pub enum TransItem<'tcx> {
@@ -54,6 +64,314 @@ fn hash<H: Hasher>(&self, s: &mut H) {
     }
 }
 
+impl<'a, 'tcx> TransItem<'tcx> {
+
+    pub fn define(&self, ccx: &CrateContext<'a, 'tcx>) {
+
+        debug!("BEGIN IMPLEMENTING '{} ({})' in cgu {}",
+                  self.to_string(ccx.tcx()),
+                  self.to_raw_string(),
+                  ccx.codegen_unit().name);
+
+        match *self {
+            TransItem::Static(node_id) => {
+                let item = ccx.tcx().map.expect_item(node_id);
+                if let hir::ItemStatic(_, m, ref expr) = item.node {
+                    match consts::trans_static(&ccx, m, expr, item.id, &item.attrs) {
+                        Ok(_) => { /* Cool, everything's alright. */ },
+                        Err(err) => ccx.tcx().sess.span_fatal(expr.span, &err.description()),
+                    };
+                } else {
+                    span_bug!(item.span, "Mismatch between hir::Item type and TransItem type")
+                }
+            }
+            TransItem::Fn(instance) => {
+                base::trans_instance(&ccx, instance);
+            }
+            TransItem::DropGlue(dg) => {
+                glue::implement_drop_glue(&ccx, dg);
+            }
+        }
+
+        debug!("END IMPLEMENTING '{} ({})' in cgu {}",
+               self.to_string(ccx.tcx()),
+               self.to_raw_string(),
+               ccx.codegen_unit().name);
+    }
+
+    pub fn predefine(&self,
+                     ccx: &CrateContext<'a, 'tcx>,
+                     linkage: llvm::Linkage) {
+        debug!("BEGIN PREDEFINING '{} ({})' in cgu {}",
+               self.to_string(ccx.tcx()),
+               self.to_raw_string(),
+               ccx.codegen_unit().name);
+
+        let symbol_name = ccx.symbol_map()
+                             .get_or_compute(ccx.shared(), *self);
+
+        debug!("symbol {}", &symbol_name);
+
+        match *self {
+            TransItem::Static(node_id) => {
+                TransItem::predefine_static(ccx, node_id, linkage, &symbol_name);
+            }
+            TransItem::Fn(instance) => {
+                TransItem::predefine_fn(ccx, instance, linkage, &symbol_name);
+            }
+            TransItem::DropGlue(dg) => {
+                TransItem::predefine_drop_glue(ccx, dg, linkage, &symbol_name);
+            }
+        }
+
+        debug!("END PREDEFINING '{} ({})' in cgu {}",
+               self.to_string(ccx.tcx()),
+               self.to_raw_string(),
+               ccx.codegen_unit().name);
+    }
+
+    fn predefine_static(ccx: &CrateContext<'a, 'tcx>,
+                        node_id: ast::NodeId,
+                        linkage: llvm::Linkage,
+                        symbol_name: &str) {
+        let def_id = ccx.tcx().map.local_def_id(node_id);
+        let ty = ccx.tcx().lookup_item_type(def_id).ty;
+        let llty = type_of::type_of(ccx, ty);
+
+        match ccx.tcx().map.get(node_id) {
+            hir::map::NodeItem(&hir::Item {
+                span, node: hir::ItemStatic(..), ..
+            }) => {
+                let g = declare::define_global(ccx, symbol_name, llty).unwrap_or_else(|| {
+                    ccx.sess().span_fatal(span,
+                        &format!("symbol `{}` is already defined", symbol_name))
+                });
+
+                llvm::SetLinkage(g, linkage);
+            }
+
+            item => bug!("predefine_static: expected static, found {:?}", item)
+        }
+    }
+
+    fn predefine_fn(ccx: &CrateContext<'a, 'tcx>,
+                    instance: Instance<'tcx>,
+                    linkage: llvm::Linkage,
+                    symbol_name: &str) {
+        assert!(!instance.substs.types.needs_infer() &&
+                !instance.substs.types.has_param_types());
+
+        let instance = inline::maybe_inline_instance(ccx, instance);
+
+        let item_ty = ccx.tcx().lookup_item_type(instance.def).ty;
+        let item_ty = ccx.tcx().erase_regions(&item_ty);
+        let mono_ty = monomorphize::apply_param_substs(ccx.tcx(), instance.substs, &item_ty);
+
+        let fn_node_id = ccx.tcx().map.as_local_node_id(instance.def).unwrap();
+        let map_node = errors::expect(
+            ccx.sess().diagnostic(),
+            ccx.tcx().map.find(fn_node_id),
+            || {
+                format!("while instantiating `{}`, couldn't find it in \
+                     the item map (may have attempted to monomorphize \
+                     an item defined in a different crate?)",
+                    instance)
+            });
+
+        match map_node {
+            hir_map::NodeItem(&hir::Item {
+                ref attrs, node: hir::ItemFn(..), ..
+            }) |
+            hir_map::NodeTraitItem(&hir::TraitItem {
+                ref attrs, node: hir::MethodTraitItem(..), ..
+            }) |
+            hir_map::NodeImplItem(&hir::ImplItem {
+                ref attrs, node: hir::ImplItemKind::Method(..), ..
+            }) => {
+                let lldecl = declare::declare_fn(ccx, symbol_name, mono_ty);
+                llvm::SetLinkage(lldecl, linkage);
+                base::set_link_section(ccx, lldecl, attrs);
+                if linkage == llvm::LinkOnceODRLinkage ||
+                   linkage == llvm::WeakODRLinkage {
+                    llvm::SetUniqueComdat(ccx.llmod(), lldecl);
+                }
+
+                attributes::from_fn_attrs(ccx, attrs, lldecl);
+                ccx.instances().borrow_mut().insert(instance, lldecl);
+            }
+            _ => bug!("Invalid item for TransItem::Fn: `{:?}`", map_node)
+        };
+
+    }
+
+    fn predefine_drop_glue(ccx: &CrateContext<'a, 'tcx>,
+                           dg: glue::DropGlueKind<'tcx>,
+                           linkage: llvm::Linkage,
+                           symbol_name: &str) {
+        let tcx = ccx.tcx();
+        assert_eq!(dg.ty(), glue::get_drop_glue_type(tcx, dg.ty()));
+        let t = dg.ty();
+
+        let sig = ty::FnSig {
+            inputs: vec![tcx.mk_mut_ptr(tcx.types.i8)],
+            output: ty::FnOutput::FnConverging(tcx.mk_nil()),
+            variadic: false,
+        };
+
+        // Create a FnType for fn(*mut i8) and substitute the real type in
+        // later - that prevents FnType from splitting fat pointers up.
+        let mut fn_ty = FnType::new(ccx, Abi::Rust, &sig, &[]);
+        fn_ty.args[0].original_ty = type_of::type_of(ccx, t).ptr_to();
+        let llfnty = fn_ty.llvm_type(ccx);
+
+        assert!(declare::get_defined_value(ccx, symbol_name).is_none());
+        let llfn = declare::declare_cfn(ccx, symbol_name, llfnty);
+        llvm::SetLinkage(llfn, linkage);
+        if linkage == llvm::LinkOnceODRLinkage ||
+           linkage == llvm::WeakODRLinkage {
+            llvm::SetUniqueComdat(ccx.llmod(), llfn);
+        }
+        attributes::set_frame_pointer_elimination(ccx, llfn);
+        ccx.drop_glues().borrow_mut().insert(dg, (llfn, fn_ty));
+    }
+
+    pub fn compute_symbol_name(&self,
+                               scx: &SharedCrateContext<'a, 'tcx>) -> String {
+        match *self {
+            TransItem::Fn(instance) => instance.symbol_name(scx),
+            TransItem::Static(node_id) => {
+                let def_id = scx.tcx().map.local_def_id(node_id);
+                Instance::mono(scx, def_id).symbol_name(scx)
+            }
+            TransItem::DropGlue(dg) => {
+                let prefix = match dg {
+                    DropGlueKind::Ty(_) => "drop",
+                    DropGlueKind::TyContents(_) => "drop_contents",
+                };
+                symbol_names::exported_name_from_type_and_prefix(scx, dg.ty(), prefix)
+            }
+        }
+    }
+
+    pub fn requests_inline(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> bool {
+        match *self {
+            TransItem::Fn(ref instance) => {
+                !instance.substs.types.is_empty() || {
+                    let attributes = tcx.get_attrs(instance.def);
+                    attr::requests_inline(&attributes[..])
+                }
+            }
+            TransItem::DropGlue(..) => true,
+            TransItem::Static(..)   => false,
+        }
+    }
+
+    pub fn is_from_extern_crate(&self) -> bool {
+        match *self {
+            TransItem::Fn(ref instance) => !instance.def.is_local(),
+            TransItem::DropGlue(..) |
+            TransItem::Static(..)   => false,
+        }
+    }
+
+    pub fn is_instantiated_only_on_demand(&self) -> bool {
+        match *self {
+            TransItem::Fn(ref instance) => !instance.def.is_local() ||
+                                           !instance.substs.types.is_empty(),
+            TransItem::DropGlue(..) => true,
+            TransItem::Static(..)   => false,
+        }
+    }
+
+    pub fn is_generic_fn(&self) -> bool {
+        match *self {
+            TransItem::Fn(ref instance) => !instance.substs.types.is_empty(),
+            TransItem::DropGlue(..) |
+            TransItem::Static(..)   => false,
+        }
+    }
+
+    pub fn explicit_linkage(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Option<llvm::Linkage> {
+        let def_id = match *self {
+            TransItem::Fn(ref instance) => instance.def,
+            TransItem::Static(node_id) => tcx.map.local_def_id(node_id),
+            TransItem::DropGlue(..) => return None,
+        };
+
+        let attributes = tcx.get_attrs(def_id);
+        if let Some(name) = attr::first_attr_value_str_by_name(&attributes, "linkage") {
+            if let Some(linkage) = base::llvm_linkage_by_name(&name) {
+                Some(linkage)
+            } else {
+                let span = tcx.map.span_if_local(def_id);
+                if let Some(span) = span {
+                    tcx.sess.span_fatal(span, "invalid linkage specified")
+                } else {
+                    tcx.sess.fatal(&format!("invalid linkage specified: {}", name))
+                }
+            }
+        } else {
+            None
+        }
+    }
+
+    pub fn to_string(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> String {
+        let hir_map = &tcx.map;
+
+        return match *self {
+            TransItem::DropGlue(dg) => {
+                let mut s = String::with_capacity(32);
+                match dg {
+                    DropGlueKind::Ty(_) => s.push_str("drop-glue "),
+                    DropGlueKind::TyContents(_) => s.push_str("drop-glue-contents "),
+                };
+                push_unique_type_name(tcx, dg.ty(), &mut s);
+                s
+            }
+            TransItem::Fn(instance) => {
+                to_string_internal(tcx, "fn ", instance)
+            },
+            TransItem::Static(node_id) => {
+                let def_id = hir_map.local_def_id(node_id);
+                let instance = Instance::new(def_id,
+                                             tcx.mk_substs(subst::Substs::empty()));
+                to_string_internal(tcx, "static ", instance)
+            },
+        };
+
+        fn to_string_internal<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+                                        prefix: &str,
+                                        instance: Instance<'tcx>)
+                                        -> String {
+            let mut result = String::with_capacity(32);
+            result.push_str(prefix);
+            push_instance_as_string(tcx, instance, &mut result);
+            result
+        }
+    }
+
+    pub fn to_raw_string(&self) -> String {
+        match *self {
+            TransItem::DropGlue(dg) => {
+                let prefix = match dg {
+                    DropGlueKind::Ty(_) => "Ty",
+                    DropGlueKind::TyContents(_) => "TyContents",
+                };
+                format!("DropGlue({}: {})", prefix, dg.ty() as *const _ as usize)
+            }
+            TransItem::Fn(instance) => {
+                format!("Fn({:?}, {})",
+                         instance.def,
+                         instance.substs as *const _ as usize)
+            }
+            TransItem::Static(id) => {
+                format!("Static({:?})", id)
+            }
+        }
+    }
+}
+
+
 //=-----------------------------------------------------------------------------
 // TransItem String Keys
 //=-----------------------------------------------------------------------------
@@ -277,108 +595,3 @@ pub fn type_to_string<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
     push_unique_type_name(tcx, ty, &mut output);
     output
 }
-
-impl<'tcx> TransItem<'tcx> {
-
-    pub fn requests_inline<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> bool {
-        match *self {
-            TransItem::Fn(ref instance) => {
-                let attributes = tcx.get_attrs(instance.def);
-                attr::requests_inline(&attributes[..])
-            }
-            TransItem::DropGlue(..) => true,
-            TransItem::Static(..)   => false,
-        }
-    }
-
-    pub fn is_from_extern_crate(&self) -> bool {
-        match *self {
-            TransItem::Fn(ref instance) => !instance.def.is_local(),
-            TransItem::DropGlue(..) |
-            TransItem::Static(..)   => false,
-        }
-    }
-
-    pub fn is_lazily_instantiated(&self) -> bool {
-        match *self {
-            TransItem::Fn(ref instance) => !instance.substs.types.is_empty(),
-            TransItem::DropGlue(..) => true,
-            TransItem::Static(..)   => false,
-        }
-    }
-
-    pub fn explicit_linkage<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Option<llvm::Linkage> {
-        let def_id = match *self {
-            TransItem::Fn(ref instance) => instance.def,
-            TransItem::Static(node_id) => tcx.map.local_def_id(node_id),
-            TransItem::DropGlue(..) => return None,
-        };
-
-        let attributes = tcx.get_attrs(def_id);
-        if let Some(name) = attr::first_attr_value_str_by_name(&attributes, "linkage") {
-            if let Some(linkage) = llvm_linkage_by_name(&name) {
-                Some(linkage)
-            } else {
-                let span = tcx.map.span_if_local(def_id);
-                if let Some(span) = span {
-                    tcx.sess.span_fatal(span, "invalid linkage specified")
-                } else {
-                    tcx.sess.fatal(&format!("invalid linkage specified: {}", name))
-                }
-            }
-        } else {
-            None
-        }
-    }
-
-    pub fn to_string<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> String {
-        let hir_map = &tcx.map;
-
-        return match *self {
-            TransItem::DropGlue(dg) => {
-                let mut s = String::with_capacity(32);
-                match dg {
-                    DropGlueKind::Ty(_) => s.push_str("drop-glue "),
-                    DropGlueKind::TyContents(_) => s.push_str("drop-glue-contents "),
-                };
-                push_unique_type_name(tcx, dg.ty(), &mut s);
-                s
-            }
-            TransItem::Fn(instance) => {
-                to_string_internal(tcx, "fn ", instance)
-            },
-            TransItem::Static(node_id) => {
-                let def_id = hir_map.local_def_id(node_id);
-                let empty_substs = tcx.mk_substs(subst::Substs::empty());
-                let instance = Instance::new(def_id, empty_substs);
-                to_string_internal(tcx, "static ", instance)
-            },
-        };
-
-        fn to_string_internal<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                                        prefix: &str,
-                                        instance: Instance<'tcx>)
-                                        -> String {
-            let mut result = String::with_capacity(32);
-            result.push_str(prefix);
-            push_instance_as_string(tcx, instance, &mut result);
-            result
-        }
-    }
-
-    pub fn to_raw_string(&self) -> String {
-        match *self {
-            TransItem::DropGlue(dg) => {
-                format!("DropGlue({})", dg.ty() as *const _ as usize)
-            }
-            TransItem::Fn(instance) => {
-                format!("Fn({:?}, {})",
-                         instance.def,
-                         instance.substs as *const _ as usize)
-            }
-            TransItem::Static(id) => {
-                format!("Static({:?})", id)
-            }
-        }
-    }
-}
index 0fb08ec9855de006bebaa15071f41af6dc453d43..5a3268e9e447b25a9165c02eec623c5f6164cedd 100644 (file)
@@ -275,8 +275,6 @@ fn param<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, n: u32) -> Ty<'tcx> {
             "fadd_fast" | "fsub_fast" | "fmul_fast" | "fdiv_fast" | "frem_fast" =>
                 (1, vec![param(ccx, 0), param(ccx, 0)], param(ccx, 0)),
 
-            "return_address" => (0, vec![], tcx.mk_imm_ptr(tcx.types.u8)),
-
             "assume" => (0, vec![tcx.types.bool], tcx.mk_nil()),
 
             "discriminant_value" => (1, vec![
index 139dedeb70fdf061cee3dc7dc1ee9277836134a3..7827459baa87f310818813f69320e9982cbd0859 100644 (file)
@@ -317,6 +317,9 @@ pub fn is_method(&self) -> bool {
     pub fn is_ty_method(&self) -> bool {
         ItemType::from_item(self) == ItemType::TyMethod
     }
+    pub fn is_primitive(&self) -> bool {
+        ItemType::from_item(self) == ItemType::Primitive
+    }
     pub fn is_stripped(&self) -> bool {
         match self.inner { StrippedItem(..) => true, _ => false }
     }
index 84e98a6739193e9b917135c3b218e00cb15bd08f..096e1ecc9ffb6a33a67b1dceb84a2b8837dd1d26 100644 (file)
@@ -107,7 +107,7 @@ pub enum Class {
 ///
 /// The classifier will call into the `Writer` implementation as it finds spans
 /// of text to highlight. Exactly how that text should be highlighted is up to
-/// the implemention.
+/// the implementation.
 pub trait Writer {
     /// Called when we start processing a span of text that should be highlighted.
     /// The `Class` argument specifies how it should be highlighted.
index acf867561a6370aa6084c3426f6497071dd7118b..c263bcb04e9b6fc62b199100b5a65c6a47211598 100644 (file)
@@ -1304,7 +1304,12 @@ fn render(writer: &mut io::Write, cx: &Context, it: &clean::Item,
                 *slot.borrow_mut() = cx.current.clone();
             });
 
-            let mut title = cx.current.join("::");
+            let mut title = if it.is_primitive() {
+                // No need to include the namespace for primitive types
+                String::new()
+            } else {
+                cx.current.join("::")
+            };
             if pushname {
                 if !title.is_empty() {
                     title.push_str("::");
@@ -1555,11 +1560,7 @@ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
             clean::PrimitiveItem(..) => write!(fmt, "Primitive Type ")?,
             _ => {}
         }
-        let is_primitive = match self.item.inner {
-            clean::PrimitiveItem(..) => true,
-            _ => false,
-        };
-        if !is_primitive {
+        if !self.item.is_primitive() {
             let cur = &self.cx.current;
             let amt = if self.item.is_mod() { cur.len() - 1 } else { cur.len() };
             for (i, component) in cur.iter().enumerate().take(amt) {
@@ -1591,7 +1592,7 @@ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
         // [src] link in the downstream documentation will actually come back to
         // this page, and this link will be auto-clicked. The `id` attribute is
         // used to find the link to auto-click.
-        if self.cx.shared.include_sources && !is_primitive {
+        if self.cx.shared.include_sources && !self.item.is_primitive() {
             if let Some(l) = self.href() {
                 write!(fmt, "<a id='src-{}' class='srclink' \
                               href='{}' title='{}'>[src]</a>",
@@ -2715,7 +2716,7 @@ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
         let parentlen = cx.current.len() - if it.is_mod() {1} else {0};
 
         // the sidebar is designed to display sibling functions, modules and
-        // other miscellaneous informations. since there are lots of sibling
+        // other miscellaneous information. since there are lots of sibling
         // items (and that causes quadratic growth in large modules),
         // we refactor common parts into a shared JavaScript file per module.
         // still, we don't move everything into JS because we want to preserve
index b45e059e6d5e9d87c671223d20dfa3da090e6327..303cc671f4a230c79181cd8ad91ad8b66992b237 100644 (file)
@@ -572,14 +572,6 @@ a.test-arrow {
     right: 5px;
 }
 
-.methods .section-header {
-    /* Override parent class attributes. */
-    border-bottom: none !important;
-    font-size: 1.1em !important;
-    margin: 0 0 -5px;
-    padding: 0;
-}
-
 .section-header:hover a:after {
     content: '\2002\00a7\2002';
 }
index e142c78569bd7ad597e3b71bfcb6db88df487023..05ae8ed5b0b66be71b491821fa99be19f2534f74 100644 (file)
@@ -214,6 +214,30 @@ pub fn last_os_error() -> Error {
     }
 
     /// Creates a new instance of an `Error` from a particular OS error code.
+    ///
+    /// # Examples
+    ///
+    /// On Linux:
+    ///
+    /// ```
+    /// # if cfg!(target_os = "linux") {
+    /// use std::io;
+    ///
+    /// let error = io::Error::from_raw_os_error(98);
+    /// assert_eq!(error.kind(), io::ErrorKind::AddrInUse);
+    /// # }
+    /// ```
+    ///
+    /// On Windows:
+    ///
+    /// ```
+    /// # if cfg!(windows) {
+    /// use std::io;
+    ///
+    /// let error = io::Error::from_raw_os_error(10048);
+    /// assert_eq!(error.kind(), io::ErrorKind::AddrInUse);
+    /// # }
+    /// ```
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn from_raw_os_error(code: i32) -> Error {
         Error { repr: Repr::Os(code) }
index 1d97611eabb2671261826c8abfa2a090dcc2f886..a408b4378e19e6b14cdb0f68478adf628f527dc0 100644 (file)
@@ -239,7 +239,7 @@ pub fn memrchr(x: u8, text: &[u8]) -> Option<usize> {
         text[..offset].iter().rposition(|elt| *elt == x)
     }
 
-    // test fallback implementations on all plattforms
+    // test fallback implementations on all platforms
     #[test]
     fn matches_one() {
         assert_eq!(Some(0), memchr(b'a', b"a"));
index ad4cdef615847719d2f63ff078b8f36346a302e9..2d19561139b58144d12df7dc4c37f838199a1505 100644 (file)
@@ -1529,8 +1529,7 @@ pub fn parent(&self) -> Option<&Path> {
 
     /// The final component of the path, if it is a normal file.
     ///
-    /// If the path terminates in `.`, `..`, or consists solely of a root of
-    /// prefix, `file_name` will return `None`.
+    /// If the path terminates in `..`, `file_name` will return `None`.
     ///
     /// # Examples
     ///
@@ -1543,6 +1542,17 @@ pub fn parent(&self) -> Option<&Path> {
     ///
     /// assert_eq!(Some(os_str), path.file_name());
     /// ```
+    ///
+    /// # Other examples
+    ///
+    /// ```
+    /// use std::path::Path;
+    /// use std::ffi::OsStr;
+    ///
+    /// assert_eq!(Some(OsStr::new("foo.txt")), Path::new("foo.txt/.").file_name());
+    /// assert_eq!(Some(OsStr::new("foo.txt")), Path::new("foo.txt/.//").file_name());
+    /// assert_eq!(None, Path::new("foo.txt/..").file_name());
+    /// ```
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn file_name(&self) -> Option<&OsStr> {
         self.components().next_back().and_then(|p| {
index 274e495d70eb620b3f0d4e5f2ea91ee618825dfb..26925b12f93d6ba3d28fb0fb0eee80c1f8d639c0 100644 (file)
@@ -152,9 +152,19 @@ pub fn lookup_host(host: &str) -> io::Result<LookupHost> {
     init();
 
     let c_host = CString::new(host)?;
+    let hints = c::addrinfo {
+        ai_flags: 0,
+        ai_family: 0,
+        ai_socktype: c::SOCK_STREAM,
+        ai_protocol: 0,
+        ai_addrlen: 0,
+        ai_addr: ptr::null_mut(),
+        ai_canonname: ptr::null_mut(),
+        ai_next: ptr::null_mut()
+    };
     let mut res = ptr::null_mut();
     unsafe {
-        cvt_gai(c::getaddrinfo(c_host.as_ptr(), ptr::null(), ptr::null(),
+        cvt_gai(c::getaddrinfo(c_host.as_ptr(), ptr::null(), &hints,
                                &mut res))?;
         Ok(LookupHost { original: res, cur: res })
     }
index eaf82f5f43ded1fe71823722f8ddd4cdada45b7d..ff75149f518abd8c1f16c65519abdb072bffb924 100644 (file)
@@ -33,7 +33,7 @@ fn configure<T: HasAttrs>(&mut self, node: T) -> Option<T> {
         if self.in_cfg(node.attrs()) { Some(node) } else { None }
     }
 
-    fn process_cfg_attrs<T: HasAttrs>(&mut self, node: T) -> T {
+    pub fn process_cfg_attrs<T: HasAttrs>(&mut self, node: T) -> T {
         node.map_attrs(|attrs| {
             attrs.into_iter().filter_map(|attr| self.process_cfg_attr(attr)).collect()
         })
index c670283e559d9b8c60451dc964c617302ccada20..b2b63d0dbb4bd76b4b8fe8ac3121adf460438d67 100644 (file)
@@ -237,7 +237,7 @@ fn mac_result<'a>(path: &ast::Path, ident: Option<Ident>, tts: Vec<TokenTree>, m
                     },
                 });
 
-                let marked_tts = mark_tts(tts, mark);
+                let marked_tts = mark_tts(&tts, mark);
                 Some(expandfun.expand(fld.cx, call_site, &marked_tts))
             }
 
@@ -257,7 +257,7 @@ fn mac_result<'a>(path: &ast::Path, ident: Option<Ident>, tts: Vec<TokenTree>, m
                     }
                 });
 
-                let marked_tts = mark_tts(tts, mark);
+                let marked_tts = mark_tts(&tts, mark);
                 Some(expander.expand(fld.cx, call_site, ident, marked_tts))
             }
 
@@ -769,7 +769,11 @@ fn expand_annotatable(mut item: Annotatable, fld: &mut MacroExpander) -> SmallVe
             };
 
             fld.cx.bt_pop();
-            modified.into_iter().flat_map(|it| expand_annotatable(it, fld)).collect()
+            let configured = modified.into_iter().flat_map(|it| {
+                it.fold_with(&mut fld.strip_unconfigured())
+            }).collect::<SmallVector<_>>();
+
+            configured.into_iter().flat_map(|it| expand_annotatable(it, fld)).collect()
         }
     }
 }
@@ -1126,7 +1130,7 @@ fn fold_mac(&mut self, Spanned {node, span}: ast::Mac) -> ast::Mac {
         Spanned {
             node: Mac_ {
                 path: self.fold_path(node.path),
-                tts: self.fold_tts(node.tts),
+                tts: self.fold_tts(&node.tts),
             },
             span: self.new_span(span),
         }
@@ -1141,7 +1145,7 @@ fn new_span(&mut self, mut span: Span) -> Span {
 }
 
 // apply a given mark to the given token trees. Used prior to expansion of a macro.
-fn mark_tts(tts: Vec<TokenTree>, m: Mrk) -> Vec<TokenTree> {
+fn mark_tts(tts: &[TokenTree], m: Mrk) -> Vec<TokenTree> {
     noop_fold_tts(tts, &mut Marker{mark:m, expn_id: None})
 }
 
index 68527b0797d5b6031da5ca6dcc4a2d9eb2c24341..ffc950d76dd27f43047bb06e54def08e5f265266 100644 (file)
@@ -32,6 +32,7 @@ pub mod rt {
     use ext::base::ExtCtxt;
     use parse::{self, token, classify};
     use ptr::P;
+    use std::rc::Rc;
 
     use tokenstream::{self, TokenTree};
 
@@ -215,12 +216,12 @@ fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> {
             if self.node.style == ast::AttrStyle::Inner {
                 r.push(TokenTree::Token(self.span, token::Not));
             }
-            r.push(TokenTree::Delimited(self.span, tokenstream::Delimited {
+            r.push(TokenTree::Delimited(self.span, Rc::new(tokenstream::Delimited {
                 delim: token::Bracket,
                 open_span: self.span,
                 tts: self.node.value.to_tokens(cx),
                 close_span: self.span,
-            }));
+            })));
             r
         }
     }
@@ -235,12 +236,12 @@ fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> {
 
     impl ToTokens for () {
         fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
-            vec![TokenTree::Delimited(DUMMY_SP, tokenstream::Delimited {
+            vec![TokenTree::Delimited(DUMMY_SP, Rc::new(tokenstream::Delimited {
                 delim: token::Paren,
                 open_span: DUMMY_SP,
                 tts: vec![],
                 close_span: DUMMY_SP,
-            })]
+            }))]
         }
     }
 
@@ -791,9 +792,14 @@ fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, matcher: bool) -> Vec<ast::Stm
                                 id_ext("tokenstream"),
                                 id_ext("SequenceRepetition")];
             let e_seq_struct = cx.expr_struct(sp, cx.path_global(sp, seq_path), fields);
+            let e_rc_new = cx.expr_call_global(sp, vec![id_ext("std"),
+                                                        id_ext("rc"),
+                                                        id_ext("Rc"),
+                                                        id_ext("new")],
+                                                   vec![e_seq_struct]);
             let e_tok = cx.expr_call(sp,
                                      mk_tt_path(cx, sp, "Sequence"),
-                                     vec!(e_sp, e_seq_struct));
+                                     vec!(e_sp, e_rc_new));
             let e_push =
                 cx.expr_method_call(sp,
                                     cx.expr_ident(sp, id_ext("tt")),
index 23f0b1fff0ae72b79c28763d4bfab6befbfc8706..84572b84963f3812511a2efdb4c8622d875a4297 100644 (file)
@@ -28,6 +28,7 @@
 use std::cell::RefCell;
 use std::collections::{HashMap};
 use std::collections::hash_map::{Entry};
+use std::rc::Rc;
 
 struct ParserAnyMacro<'a> {
     parser: RefCell<Parser<'a>>,
@@ -262,7 +263,7 @@ pub fn compile<'cx>(cx: &'cx mut ExtCtxt,
     let match_lhs_tok = MatchNt(lhs_nm, token::str_to_ident("tt"));
     let match_rhs_tok = MatchNt(rhs_nm, token::str_to_ident("tt"));
     let argument_gram = vec![
-        TokenTree::Sequence(DUMMY_SP, tokenstream::SequenceRepetition {
+        TokenTree::Sequence(DUMMY_SP, Rc::new(tokenstream::SequenceRepetition {
             tts: vec![
                 TokenTree::Token(DUMMY_SP, match_lhs_tok),
                 TokenTree::Token(DUMMY_SP, token::FatArrow),
@@ -271,14 +272,14 @@ pub fn compile<'cx>(cx: &'cx mut ExtCtxt,
             separator: Some(token::Semi),
             op: tokenstream::KleeneOp::OneOrMore,
             num_captures: 2,
-        }),
+        })),
         // to phase into semicolon-termination instead of semicolon-separation
-        TokenTree::Sequence(DUMMY_SP, tokenstream::SequenceRepetition {
+        TokenTree::Sequence(DUMMY_SP, Rc::new(tokenstream::SequenceRepetition {
             tts: vec![TokenTree::Token(DUMMY_SP, token::Semi)],
             separator: None,
             op: tokenstream::KleeneOp::ZeroOrMore,
             num_captures: 0
-        }),
+        })),
     ];
 
     // Parse the macro_rules! invocation (`none` is for no interpolations):
index 40944a9a1c2d360bf13c78a9b1a45d742f2fb998..7c0d10669f30e108e6e173510ebbd2a9e8fc601c 100644 (file)
@@ -79,11 +79,11 @@ pub fn new_tt_reader_with_doc_flag(sp_diag: &Handler,
     let mut r = TtReader {
         sp_diag: sp_diag,
         stack: vec!(TtFrame {
-            forest: TokenTree::Sequence(DUMMY_SP, tokenstream::SequenceRepetition {
+            forest: TokenTree::Sequence(DUMMY_SP, Rc::new(tokenstream::SequenceRepetition {
                 tts: src,
                 // doesn't matter. This merely holds the root unzipping.
                 separator: None, op: tokenstream::KleeneOp::ZeroOrMore, num_captures: 0
-            }),
+            })),
             idx: 0,
             dotdotdoted: false,
             sep: None,
index ed6f09eed645f7741951542aaeea87665667266c..ac3d643b185cac725fd7139d14ae4541970a3be6 100644 (file)
@@ -28,6 +28,8 @@
 use util::small_vector::SmallVector;
 use util::move_map::MoveMap;
 
+use std::rc::Rc;
+
 pub trait Folder : Sized {
     // Any additions to this trait should happen in form
     // of a call to a public `noop_*` function that only calls
@@ -222,11 +224,11 @@ fn fold_ty_params(&mut self, tps: P<[TyParam]>) -> P<[TyParam]> {
         noop_fold_ty_params(tps, self)
     }
 
-    fn fold_tt(&mut self, tt: TokenTree) -> TokenTree {
+    fn fold_tt(&mut self, tt: &TokenTree) -> TokenTree {
         noop_fold_tt(tt, self)
     }
 
-    fn fold_tts(&mut self, tts: Vec<TokenTree>) -> Vec<TokenTree> {
+    fn fold_tts(&mut self, tts: &[TokenTree]) -> Vec<TokenTree> {
         noop_fold_tts(tts, self)
     }
 
@@ -501,7 +503,7 @@ pub fn noop_fold_mac<T: Folder>(Spanned {node, span}: Mac, fld: &mut T) -> Mac {
     Spanned {
         node: Mac_ {
             path: fld.fold_path(node.path),
-            tts: fld.fold_tts(node.tts),
+            tts: fld.fold_tts(&node.tts),
         },
         span: fld.new_span(span)
     }
@@ -528,26 +530,32 @@ pub fn noop_fold_arg<T: Folder>(Arg {id, pat, ty}: Arg, fld: &mut T) -> Arg {
     }
 }
 
-pub fn noop_fold_tt<T: Folder>(tt: TokenTree, fld: &mut T) -> TokenTree {
-    match tt {
+pub fn noop_fold_tt<T: Folder>(tt: &TokenTree, fld: &mut T) -> TokenTree {
+    match *tt {
         TokenTree::Token(span, ref tok) =>
             TokenTree::Token(span, fld.fold_token(tok.clone())),
-        TokenTree::Delimited(span, delimed) => TokenTree::Delimited(span, Delimited {
-            delim: delimed.delim,
-            open_span: delimed.open_span,
-            tts: fld.fold_tts(delimed.tts),
-            close_span: delimed.close_span,
-        }),
-        TokenTree::Sequence(span, seq) => TokenTree::Sequence(span, SequenceRepetition {
-            tts: fld.fold_tts(seq.tts),
-            separator: seq.separator.clone().map(|tok| fld.fold_token(tok)),
-            ..seq
-        }),
+        TokenTree::Delimited(span, ref delimed) => {
+            TokenTree::Delimited(span, Rc::new(
+                            Delimited {
+                                delim: delimed.delim,
+                                open_span: delimed.open_span,
+                                tts: fld.fold_tts(&delimed.tts),
+                                close_span: delimed.close_span,
+                            }
+                        ))
+        },
+        TokenTree::Sequence(span, ref seq) =>
+            TokenTree::Sequence(span,
+                       Rc::new(SequenceRepetition {
+                           tts: fld.fold_tts(&seq.tts),
+                           separator: seq.separator.clone().map(|tok| fld.fold_token(tok)),
+                           ..**seq
+                       })),
     }
 }
 
-pub fn noop_fold_tts<T: Folder>(tts: Vec<TokenTree>, fld: &mut T) -> Vec<TokenTree> {
-    tts.move_map(|tt| fld.fold_tt(tt))
+pub fn noop_fold_tts<T: Folder>(tts: &[TokenTree], fld: &mut T) -> Vec<TokenTree> {
+    tts.iter().map(|tt| fld.fold_tt(tt)).collect()
 }
 
 // apply ident folder if it's an ident, apply other folds to interpolated nodes
@@ -605,7 +613,7 @@ pub fn noop_fold_interpolated<T: Folder>(nt: token::Nonterminal, fld: &mut T)
             token::NtIdent(Box::new(Spanned::<Ident>{node: fld.fold_ident(id.node), ..*id})),
         token::NtMeta(meta_item) => token::NtMeta(fld.fold_meta_item(meta_item)),
         token::NtPath(path) => token::NtPath(Box::new(fld.fold_path(*path))),
-        token::NtTT(tt) => token::NtTT(tt.map(|tt| fld.fold_tt(tt))),
+        token::NtTT(tt) => token::NtTT(P(fld.fold_tt(&tt))),
         token::NtArm(arm) => token::NtArm(fld.fold_arm(arm)),
         token::NtImplItem(arm) =>
             token::NtImplItem(arm.map(|arm| fld.fold_impl_item(arm)
index bbcc044d43c6b74b8701924847df75867f0a8ec6..9502bc48a3e110f84e0ddf107a3a3181ec92c06d 100644 (file)
@@ -662,6 +662,7 @@ pub fn integer_lit(s: &str,
 #[cfg(test)]
 mod tests {
     use super::*;
+    use std::rc::Rc;
     use syntax_pos::{Span, BytePos, Pos, NO_EXPANSION};
     use codemap::Spanned;
     use ast::{self, PatKind};
@@ -763,7 +764,7 @@ fn string_to_tts_macro () {
                             )
                             if first_delimed.delim == token::Paren
                             && ident.name.as_str() == "a" => {},
-                            _ => panic!("value 3: {:?}", *first_delimed),
+                            _ => panic!("value 3: {:?}", **first_delimed),
                         }
                         let tts = &second_delimed.tts[..];
                         match (tts.len(), tts.get(0), tts.get(1)) {
@@ -774,10 +775,10 @@ fn string_to_tts_macro () {
                             )
                             if second_delimed.delim == token::Paren
                             && ident.name.as_str() == "a" => {},
-                            _ => panic!("value 4: {:?}", *second_delimed),
+                            _ => panic!("value 4: {:?}", **second_delimed),
                         }
                     },
-                    _ => panic!("value 2: {:?}", *macro_delimed),
+                    _ => panic!("value 2: {:?}", **macro_delimed),
                 }
             },
             _ => panic!("value: {:?}",tts),
@@ -793,7 +794,7 @@ fn string_to_tts_1() {
             TokenTree::Token(sp(3, 4), token::Ident(str_to_ident("a"))),
             TokenTree::Delimited(
                 sp(5, 14),
-                tokenstream::Delimited {
+                Rc::new(tokenstream::Delimited {
                     delim: token::DelimToken::Paren,
                     open_span: sp(5, 6),
                     tts: vec![
@@ -802,10 +803,10 @@ fn string_to_tts_1() {
                         TokenTree::Token(sp(10, 13), token::Ident(str_to_ident("i32"))),
                     ],
                     close_span: sp(13, 14),
-                }),
+                })),
             TokenTree::Delimited(
                 sp(15, 21),
-                tokenstream::Delimited {
+                Rc::new(tokenstream::Delimited {
                     delim: token::DelimToken::Brace,
                     open_span: sp(15, 16),
                     tts: vec![
@@ -813,7 +814,7 @@ fn string_to_tts_1() {
                         TokenTree::Token(sp(18, 19), token::Semi),
                     ],
                     close_span: sp(20, 21),
-                })
+                }))
         ];
 
         assert_eq!(tts, expected);
index 6fa95afd9fb2223c9dec5dc1565cb68a40db4e7f..e4875b7c244fd9cb8f641fee92312b9954a0dc07 100644 (file)
@@ -495,64 +495,6 @@ fn tokens_to_string(tokens: &[TokenType]) -> String {
         }
     }
 
-    /// Check for erroneous `ident { }`; if matches, signal error and
-    /// recover (without consuming any expected input token).  Returns
-    /// true if and only if input was consumed for recovery.
-    pub fn check_for_erroneous_unit_struct_expecting(&mut self,
-                                                     expected: &[token::Token])
-                                                     -> bool {
-        if self.token == token::OpenDelim(token::Brace)
-            && expected.iter().all(|t| *t != token::OpenDelim(token::Brace))
-            && self.look_ahead(1, |t| *t == token::CloseDelim(token::Brace)) {
-            // matched; signal non-fatal error and recover.
-            let span = self.span;
-            self.span_err(span, "unit-like struct construction is written with no trailing `{ }`");
-            self.eat(&token::OpenDelim(token::Brace));
-            self.eat(&token::CloseDelim(token::Brace));
-            true
-        } else {
-            false
-        }
-    }
-
-    /// Commit to parsing a complete expression `e` expected to be
-    /// followed by some token from the set edible + inedible.  Recover
-    /// from anticipated input errors, discarding erroneous characters.
-    pub fn commit_expr(&mut self, e: &Expr, edible: &[token::Token],
-                       inedible: &[token::Token]) -> PResult<'a, ()> {
-        debug!("commit_expr {:?}", e);
-        if let ExprKind::Path(..) = e.node {
-            // might be unit-struct construction; check for recoverableinput error.
-            let expected = edible.iter()
-                .cloned()
-                .chain(inedible.iter().cloned())
-                .collect::<Vec<_>>();
-            self.check_for_erroneous_unit_struct_expecting(&expected[..]);
-        }
-        self.expect_one_of(edible, inedible)
-    }
-
-    pub fn commit_expr_expecting(&mut self, e: &Expr, edible: token::Token) -> PResult<'a, ()> {
-        self.commit_expr(e, &[edible], &[])
-    }
-
-    /// Commit to parsing a complete statement `s`, which expects to be
-    /// followed by some token from the set edible + inedible.  Check
-    /// for recoverable input errors, discarding erroneous characters.
-    pub fn commit_stmt(&mut self, edible: &[token::Token],
-                       inedible: &[token::Token]) -> PResult<'a, ()> {
-        if self.last_token
-               .as_ref()
-               .map_or(false, |t| t.is_ident() || t.is_path()) {
-            let expected = edible.iter()
-                .cloned()
-                .chain(inedible.iter().cloned())
-                .collect::<Vec<_>>();
-            self.check_for_erroneous_unit_struct_expecting(&expected);
-        }
-        self.expect_one_of(edible, inedible)
-    }
-
     /// returns the span of expr, if it was not interpolated or the span of the interpolated token
     fn interpolated_or_expr_span(&self,
                                  expr: PResult<'a, P<Expr>>)
@@ -1247,7 +1189,7 @@ pub fn parse_trait_item(&mut self) -> PResult<'a, TraitItem> {
             let default = if self.check(&token::Eq) {
                 self.bump();
                 let expr = self.parse_expr()?;
-                self.commit_expr_expecting(&expr, token::Semi)?;
+                self.expect(&token::Semi)?;
                 Some(expr)
             } else {
                 self.expect(&token::Semi)?;
@@ -2195,8 +2137,7 @@ fn parse_bottom_expr(&mut self) -> PResult<'a, P<Expr>> {
                 let mut trailing_comma = false;
                 while self.token != token::CloseDelim(token::Paren) {
                     es.push(self.parse_expr()?);
-                    self.commit_expr(&es.last().unwrap(), &[],
-                                     &[token::Comma, token::CloseDelim(token::Paren)])?;
+                    self.expect_one_of(&[], &[token::Comma, token::CloseDelim(token::Paren)])?;
                     if self.check(&token::Comma) {
                         trailing_comma = true;
 
@@ -2407,9 +2348,8 @@ fn parse_bottom_expr(&mut self) -> PResult<'a, P<Expr>> {
                                     }
                                 }
 
-                                match self.commit_expr(&fields.last().unwrap().expr,
-                                                       &[token::Comma],
-                                                       &[token::CloseDelim(token::Brace)]) {
+                                match self.expect_one_of(&[token::Comma],
+                                                         &[token::CloseDelim(token::Brace)]) {
                                     Ok(()) => {}
                                     Err(mut e) => {
                                         e.emit();
@@ -2662,7 +2602,7 @@ fn parse_dot_or_call_expr_with_(&mut self, e0: P<Expr>, lo: BytePos) -> PResult<
                 self.bump();
                 let ix = self.parse_expr()?;
                 hi = self.span.hi;
-                self.commit_expr_expecting(&ix, token::CloseDelim(token::Bracket))?;
+                self.expect(&token::CloseDelim(token::Bracket))?;
                 let index = self.mk_index(e, ix);
                 e = self.mk_expr(lo, hi, index, ThinVec::new())
               }
@@ -2688,12 +2628,13 @@ fn parse_unquoted(&mut self) -> PResult<'a, TokenTree> {
                     )?;
                     let (sep, repeat) = self.parse_sep_and_kleene_op()?;
                     let name_num = macro_parser::count_names(&seq);
-                    return Ok(TokenTree::Sequence(mk_sp(sp.lo, seq_span.hi), SequenceRepetition {
-                        tts: seq,
-                        separator: sep,
-                        op: repeat,
-                        num_captures: name_num
-                    }));
+                    return Ok(TokenTree::Sequence(mk_sp(sp.lo, seq_span.hi),
+                                      Rc::new(SequenceRepetition {
+                                          tts: seq,
+                                          separator: sep,
+                                          op: repeat,
+                                          num_captures: name_num
+                                      })));
                 } else if self.token.is_keyword(keywords::Crate) {
                     self.bump();
                     return Ok(TokenTree::Token(sp, SpecialVarNt(SpecialMacroVar::CrateMacroVar)));
@@ -2848,12 +2789,12 @@ pub fn parse_token_tree(&mut self) -> PResult<'a, TokenTree> {
                     _ => {}
                 }
 
-                Ok(TokenTree::Delimited(span, Delimited {
+                Ok(TokenTree::Delimited(span, Rc::new(Delimited {
                     delim: delim,
                     open_span: open_span,
                     tts: tts,
                     close_span: close_span,
-                }))
+                })))
             },
             _ => {
                 // invariants: the current token is not a left-delimiter,
@@ -3328,8 +3269,7 @@ fn parse_match_expr(&mut self, mut attrs: ThinVec<Attribute>) -> PResult<'a, P<E
         let lo = self.last_span.lo;
         let discriminant = self.parse_expr_res(Restrictions::RESTRICTION_NO_STRUCT_LITERAL,
                                                None)?;
-        if let Err(mut e) = self.commit_expr_expecting(&discriminant,
-                                                       token::OpenDelim(token::Brace)) {
+        if let Err(mut e) = self.expect(&token::OpenDelim(token::Brace)) {
             if self.token == token::Token::Semi {
                 e.span_note(match_span, "did you mean to remove this `match` keyword?");
             }
@@ -3375,7 +3315,7 @@ pub fn parse_arm(&mut self) -> PResult<'a, Arm> {
             && self.token != token::CloseDelim(token::Brace);
 
         if require_comma {
-            self.commit_expr(&expr, &[token::Comma], &[token::CloseDelim(token::Brace)])?;
+            self.expect_one_of(&[token::Comma], &[token::CloseDelim(token::Brace)])?;
         } else {
             self.eat(&token::Comma);
         }
@@ -4117,7 +4057,7 @@ fn parse_block_tail(&mut self, lo: BytePos, s: BlockCheckMode) -> PResult<'a, P<
                 _ => { // all other kinds of statements:
                     let mut hi = span.hi;
                     if classify::stmt_ends_with_semi(&node) {
-                        self.commit_stmt(&[token::Semi], &[])?;
+                        self.expect(&token::Semi)?;
                         hi = self.last_span.hi;
                     }
 
@@ -4195,7 +4135,7 @@ fn handle_expression_like_statement(&mut self,
         if classify::expr_requires_semi_to_be_stmt(&e) {
             // Just check for errors and recover; do not eat semicolon yet.
             if let Err(mut e) =
-                self.commit_stmt(&[], &[token::Semi, token::CloseDelim(token::Brace)])
+                self.expect_one_of(&[], &[token::Semi, token::CloseDelim(token::Brace)])
             {
                 e.emit();
                 self.recover_stmt();
@@ -4862,7 +4802,7 @@ pub fn parse_impl_item(&mut self) -> PResult<'a, ImplItem> {
             let typ = self.parse_ty_sum()?;
             self.expect(&token::Eq)?;
             let expr = self.parse_expr()?;
-            self.commit_expr_expecting(&expr, token::Semi)?;
+            self.expect(&token::Semi)?;
             (name, ast::ImplItemKind::Const(typ, expr))
         } else {
             let (name, inner_attrs, node) = self.parse_impl_method(&vis)?;
@@ -5286,7 +5226,7 @@ fn parse_item_const(&mut self, m: Option<Mutability>) -> PResult<'a, ItemInfo> {
         let ty = self.parse_ty_sum()?;
         self.expect(&token::Eq)?;
         let e = self.parse_expr()?;
-        self.commit_expr_expecting(&e, token::Semi)?;
+        self.expect(&token::Semi)?;
         let item = match m {
             Some(m) => ItemKind::Static(ty, m, e),
             None => ItemKind::Const(ty, e),
@@ -5296,15 +5236,22 @@ fn parse_item_const(&mut self, m: Option<Mutability>) -> PResult<'a, ItemInfo> {
 
     /// Parse a `mod <foo> { ... }` or `mod <foo>;` item
     fn parse_item_mod(&mut self, outer_attrs: &[Attribute]) -> PResult<'a, ItemInfo> {
+        let outer_attrs = ::config::StripUnconfigured {
+            config: &self.cfg,
+            sess: self.sess,
+            should_test: false, // irrelevant
+            features: None, // don't perform gated feature checking
+        }.process_cfg_attrs(outer_attrs.to_owned());
+
         let id_span = self.span;
         let id = self.parse_ident()?;
         if self.check(&token::Semi) {
             self.bump();
             // This mod is in an external file. Let's go get it!
-            let (m, attrs) = self.eval_src_mod(id, outer_attrs, id_span)?;
+            let (m, attrs) = self.eval_src_mod(id, &outer_attrs, id_span)?;
             Ok((id, m, Some(attrs)))
         } else {
-            self.push_mod_path(id, outer_attrs);
+            self.push_mod_path(id, &outer_attrs);
             self.expect(&token::OpenDelim(token::Brace))?;
             let mod_inner_lo = self.span.lo;
             let attrs = self.parse_inner_attributes()?;
index 35377d14bab7cc0fe4618fb7be541311b1a987c9..0ad09fd0f7dfba492e8146a64a17ed0a25eb1f69 100644 (file)
@@ -8,18 +8,36 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-//! # Token Trees
-//! TokenTrees are syntactic forms for dealing with tokens. The description below is
-//! more complete; in short a TokenTree is a single token, a delimited sequence of token
-//! trees, or a sequence with repetition for list splicing as part of macro expansion.
+//! # Token Streams
+//!
+//! TokenStreams represent syntactic objects before they are converted into ASTs.
+//! A `TokenStream` is, roughly speaking, a sequence (eg stream) of `TokenTree`s,
+//! which are themselves either a single Token, a Delimited subsequence of tokens,
+//! or a SequenceRepetition specifier (for the purpose of sequence generation during macro
+//! expansion).
+//!
+//! A TokenStream also has a slice view, `TokenSlice`, that is analogous to `str` for
+//! `String`: it allows the programmer to divvy up, explore, and otherwise partition a
+//! TokenStream as borrowed subsequences.
 
-use ast::{AttrStyle};
-use codemap::{Span};
+use ast::{self, AttrStyle, LitKind};
+use syntax_pos::{Span, DUMMY_SP, NO_EXPANSION};
+use codemap::Spanned;
 use ext::base;
 use ext::tt::macro_parser;
 use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
 use parse::lexer;
-use parse::token;
+use parse;
+use parse::token::{self, Token, Lit, InternedString, Nonterminal};
+use parse::token::Lit as TokLit;
+
+use std::fmt;
+use std::mem;
+use std::ops::Index;
+use std::ops;
+use std::iter::*;
+
+use std::rc::Rc;
 
 /// A delimited sequence of token trees
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
@@ -54,6 +72,11 @@ pub fn open_tt(&self) -> TokenTree {
     pub fn close_tt(&self) -> TokenTree {
         TokenTree::Token(self.close_span, self.close_token())
     }
+
+    /// Returns the token trees inside the delimiters.
+    pub fn subtrees(&self) -> &[TokenTree] {
+        &self.tts
+    }
 }
 
 /// A sequence of token trees
@@ -89,18 +112,16 @@ pub enum KleeneOp {
 ///
 /// The RHS of an MBE macro is the only place `SubstNt`s are substituted.
 /// Nothing special happens to misnamed or misplaced `SubstNt`s.
-#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
+#[derive(Debug, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)]
 pub enum TokenTree {
     /// A single token
     Token(Span, token::Token),
     /// A delimited sequence of token trees
-    Delimited(Span, Delimited),
+    Delimited(Span, Rc<Delimited>),
 
     // This only makes sense in MBE macros.
-
     /// A kleene-style repetition sequence with a span
-    // FIXME(eddyb) #12938 Use DST.
-    Sequence(Span, SequenceRepetition),
+    Sequence(Span, Rc<SequenceRepetition>),
 }
 
 impl TokenTree {
@@ -109,28 +130,22 @@ pub fn len(&self) -> usize {
             TokenTree::Token(_, token::DocComment(name)) => {
                 match doc_comment_style(&name.as_str()) {
                     AttrStyle::Outer => 2,
-                    AttrStyle::Inner => 3
+                    AttrStyle::Inner => 3,
                 }
             }
             TokenTree::Token(_, token::SpecialVarNt(..)) => 2,
             TokenTree::Token(_, token::MatchNt(..)) => 3,
-            TokenTree::Delimited(_, ref delimed) => {
-                delimed.tts.len() + 2
-            }
-            TokenTree::Sequence(_, ref seq) => {
-                seq.tts.len()
-            }
-            TokenTree::Token(..) => 0
+            TokenTree::Delimited(_, ref delimed) => delimed.tts.len() + 2,
+            TokenTree::Sequence(_, ref seq) => seq.tts.len(),
+            TokenTree::Token(..) => 0,
         }
     }
 
     pub fn get_tt(&self, index: usize) -> TokenTree {
         match (self, index) {
-            (&TokenTree::Token(sp, token::DocComment(_)), 0) => {
-                TokenTree::Token(sp, token::Pound)
-            }
+            (&TokenTree::Token(sp, token::DocComment(_)), 0) => TokenTree::Token(sp, token::Pound),
             (&TokenTree::Token(sp, token::DocComment(name)), 1)
-            if doc_comment_style(&name.as_str()) == AttrStyle::Inner => {
+                if doc_comment_style(&name.as_str()) == AttrStyle::Inner => {
                 TokenTree::Token(sp, token::Not)
             }
             (&TokenTree::Token(sp, token::DocComment(name)), _) => {
@@ -138,18 +153,21 @@ pub fn get_tt(&self, index: usize) -> TokenTree {
 
                 // Searches for the occurrences of `"#*` and returns the minimum number of `#`s
                 // required to wrap the text.
-                let num_of_hashes = stripped.chars().scan(0, |cnt, x| {
-                    *cnt = if x == '"' {
-                        1
-                    } else if *cnt != 0 && x == '#' {
-                        *cnt + 1
-                    } else {
-                        0
-                    };
-                    Some(*cnt)
-                }).max().unwrap_or(0);
+                let num_of_hashes = stripped.chars()
+                    .scan(0, |cnt, x| {
+                        *cnt = if x == '"' {
+                            1
+                        } else if *cnt != 0 && x == '#' {
+                            *cnt + 1
+                        } else {
+                            0
+                        };
+                        Some(*cnt)
+                    })
+                    .max()
+                    .unwrap_or(0);
 
-                TokenTree::Delimited(sp, Delimited {
+                TokenTree::Delimited(sp, Rc::new(Delimited {
                     delim: token::Bracket,
                     open_span: sp,
                     tts: vec![TokenTree::Token(sp, token::Ident(token::str_to_ident("doc"))),
@@ -157,7 +175,7 @@ pub fn get_tt(&self, index: usize) -> TokenTree {
                               TokenTree::Token(sp, token::Literal(
                                   token::StrRaw(token::intern(&stripped), num_of_hashes), None))],
                     close_span: sp,
-                })
+                }))
             }
             (&TokenTree::Delimited(_, ref delimed), _) => {
                 if index == 0 {
@@ -179,24 +197,24 @@ pub fn get_tt(&self, index: usize) -> TokenTree {
                          TokenTree::Token(sp, token::Ident(kind))];
                 v[index].clone()
             }
-            (&TokenTree::Sequence(_, ref seq), _) => {
-                seq.tts[index].clone()
-            }
-            _ => panic!("Cannot expand a token tree")
+            (&TokenTree::Sequence(_, ref seq), _) => seq.tts[index].clone(),
+            _ => panic!("Cannot expand a token tree"),
         }
     }
 
     /// Returns the `Span` corresponding to this token tree.
     pub fn get_span(&self) -> Span {
         match *self {
-            TokenTree::Token(span, _)     => span,
+            TokenTree::Token(span, _) => span,
             TokenTree::Delimited(span, _) => span,
-            TokenTree::Sequence(span, _)  => span,
+            TokenTree::Sequence(span, _) => span,
         }
     }
 
     /// Use this token tree as a matcher to parse given tts.
-    pub fn parse(cx: &base::ExtCtxt, mtch: &[TokenTree], tts: &[TokenTree])
+    pub fn parse(cx: &base::ExtCtxt,
+                 mtch: &[TokenTree],
+                 tts: &[TokenTree])
                  -> macro_parser::NamedParseResult {
         // `None` is because we're not interpolating
         let arg_rdr = lexer::new_tt_reader_with_doc_flag(&cx.parse_sess().span_diagnostic,
@@ -206,5 +224,1071 @@ pub fn parse(cx: &base::ExtCtxt, mtch: &[TokenTree], tts: &[TokenTree])
                                                          true);
         macro_parser::parse(cx.parse_sess(), cx.cfg(), arg_rdr, mtch)
     }
+
+    /// Check if this TokenTree is equal to the other, regardless of span information.
+    pub fn eq_unspanned(&self, other: &TokenTree) -> bool {
+        match (self, other) {
+            (&TokenTree::Token(_, ref tk), &TokenTree::Token(_, ref tk2)) => tk == tk2,
+            (&TokenTree::Delimited(_, ref dl), &TokenTree::Delimited(_, ref dl2)) => {
+                (*dl).delim == (*dl2).delim && dl.tts.len() == dl2.tts.len() &&
+                {
+                    for (tt1, tt2) in dl.tts.iter().zip(dl2.tts.iter()) {
+                        if !tt1.eq_unspanned(tt2) {
+                            return false;
+                        }
+                    }
+                    true
+                }
+            }
+            (_, _) => false,
+        }
+    }
+
+    /// Retrieve the TokenTree's span.
+    pub fn span(&self) -> Span {
+        match *self {
+            TokenTree::Token(sp, _) |
+            TokenTree::Delimited(sp, _) |
+            TokenTree::Sequence(sp, _) => sp,
+        }
+    }
+
+    /// Indicates if the stream is a token that is equal to the provided token.
+    pub fn eq_token(&self, t: Token) -> bool {
+        match *self {
+            TokenTree::Token(_, ref tk) => *tk == t,
+            _ => false,
+        }
+    }
+
+    /// Indicates if the token is an identifier.
+    pub fn is_ident(&self) -> bool {
+        self.maybe_ident().is_some()
+    }
+
+    /// Returns an identifier.
+    pub fn maybe_ident(&self) -> Option<ast::Ident> {
+        match *self {
+            TokenTree::Token(_, Token::Ident(t)) => Some(t.clone()),
+            TokenTree::Delimited(_, ref dl) => {
+                let tts = dl.subtrees();
+                if tts.len() != 1 {
+                    return None;
+                }
+                tts[0].maybe_ident()
+            }
+            _ => None,
+        }
+    }
+
+    /// Returns a Token literal.
+    pub fn maybe_lit(&self) -> Option<token::Lit> {
+        match *self {
+            TokenTree::Token(_, Token::Literal(l, _)) => Some(l.clone()),
+            TokenTree::Delimited(_, ref dl) => {
+                let tts = dl.subtrees();
+                if tts.len() != 1 {
+                    return None;
+                }
+                tts[0].maybe_lit()
+            }
+            _ => None,
+        }
+    }
+
+    /// Returns an AST string literal.
+    pub fn maybe_str(&self) -> Option<ast::Lit> {
+        match *self {
+            TokenTree::Token(sp, Token::Literal(Lit::Str_(s), _)) => {
+                let l = LitKind::Str(token::intern_and_get_ident(&parse::str_lit(&s.as_str())),
+                                     ast::StrStyle::Cooked);
+                Some(Spanned {
+                    node: l,
+                    span: sp,
+                })
+            }
+            TokenTree::Token(sp, Token::Literal(Lit::StrRaw(s, n), _)) => {
+                let l = LitKind::Str(token::intern_and_get_ident(&parse::raw_str_lit(&s.as_str())),
+                                     ast::StrStyle::Raw(n));
+                Some(Spanned {
+                    node: l,
+                    span: sp,
+                })
+            }
+            _ => None,
+        }
+    }
+}
+
+/// # Token Streams
+///
+/// TokenStreams are a syntactic abstraction over TokenTrees. The goal is for procedural
+/// macros to work over TokenStreams instead of arbitrary syntax. For now, however, we
+/// are going to cut a few corners (i.e., use some of the AST structure) when we need to
+/// for backwards compatibility.
+
+/// TokenStreams are collections of TokenTrees that represent a syntactic structure. The
+/// struct itself shouldn't be directly manipulated; the internal structure is not stable,
+/// and may be changed at any time in the future. The operators will not, however (except
+/// for signatures, later on).
+#[derive(Eq,Clone,Hash,RustcEncodable,RustcDecodable)]
+pub struct TokenStream {
+    pub span: Span,
+    pub tts: Vec<TokenTree>,
+}
+
+impl fmt::Debug for TokenStream {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        if self.tts.len() == 0 {
+            write!(f, "([empty")?;
+        } else {
+            write!(f, "([")?;
+            write!(f, "{:?}", self.tts[0])?;
+
+            for tt in self.tts.iter().skip(1) {
+                write!(f, ",{:?}", tt)?;
+            }
+        }
+        write!(f, "|")?;
+        self.span.fmt(f)?;
+        write!(f, "])")
+    }
+}
+
+/// Checks if two TokenStreams are equivalent (including spans). For unspanned
+/// equality, see `eq_unspanned`.
+impl PartialEq<TokenStream> for TokenStream {
+    fn eq(&self, other: &TokenStream) -> bool {
+        self.tts == other.tts
+    }
+}
+
+// NB this will disregard gaps. if we have [a|{2,5} , b|{11,13}], the resultant span
+// will be at {2,13}. Without finer-grained span structures, however, this seems to be
+// our only recourse.
+// FIXME Do something smarter to compute the expansion id.
+fn covering_span(trees: &[TokenTree]) -> Span {
+    // disregard any dummy spans we have
+    let trees = trees.iter().filter(|t| t.span() != DUMMY_SP).collect::<Vec<&TokenTree>>();
+
+    // if we're out of spans, stop
+    if trees.len() < 1 {
+        return DUMMY_SP;
+    }
+
+    // set up the initial values
+    let fst_span = trees[0].span();
+
+    let mut lo_span = fst_span.lo;
+    let mut hi_span = fst_span.hi;
+    let mut expn_id = fst_span.expn_id;
+
+    // compute the spans iteratively
+    for t in trees.iter().skip(1) {
+        let sp = t.span();
+        if sp.lo < lo_span {
+            lo_span = sp.lo;
+        }
+        if hi_span < sp.hi {
+            hi_span = sp.hi;
+        }
+        if expn_id != sp.expn_id {
+            expn_id = NO_EXPANSION;
+        }
+    }
+
+    Span {
+        lo: lo_span,
+        hi: hi_span,
+        expn_id: expn_id,
+    }
 }
 
+/// TokenStream operators include basic destructuring, boolean operations, `maybe_...`
+/// operations, and `maybe_..._prefix` operations. Boolean operations are straightforward,
+/// indicating information about the structure of the stream. The `maybe_...` operations
+/// return `Some<...>` if the tokenstream contains the appropriate item.
+///
+/// Similarly, the `maybe_..._prefix` operations potentially return a
+/// partially-destructured stream as a pair where the first element is the expected item
+/// and the second is the remainder of the stream. As an example,
+///
+///    `maybe_path_prefix("a::b::c(a,b,c).foo()") -> (a::b::c, "(a,b,c).foo()")`
+impl TokenStream {
+    /// Convert a vector of `TokenTree`s into a `TokenStream`.
+    pub fn from_tts(trees: Vec<TokenTree>) -> TokenStream {
+        let span = covering_span(&trees);
+        TokenStream {
+            tts: trees,
+            span: span,
+        }
+    }
+
+    /// Copies all of the TokenTrees from the TokenSlice, appending them to the stream.
+    pub fn append_stream(mut self, ts2: &TokenSlice) {
+        for tt in ts2.iter() {
+            self.tts.push(tt.clone());
+        }
+        self.span = covering_span(&self.tts[..]);
+    }
+
+    /// Manually change a TokenStream's span.
+    pub fn respan(self, span: Span) -> TokenStream {
+        TokenStream {
+            tts: self.tts,
+            span: span,
+        }
+    }
+
+    /// Construct a TokenStream from an ast literal.
+    pub fn from_ast_lit_str(lit: ast::Lit) -> Option<TokenStream> {
+        match lit.node {
+            LitKind::Str(val, _) => {
+                let val = TokLit::Str_(token::intern(&val));
+                Some(TokenStream::from_tts(vec![TokenTree::Token(lit.span,
+                                                                 Token::Literal(val, None))]))
+            }
+            _ => None,
+        }
+
+    }
+
+    /// Convert a vector of TokenTrees into a parentheses-delimited TokenStream.
+    pub fn as_paren_delimited_stream(tts: Vec<TokenTree>) -> TokenStream {
+        let new_sp = covering_span(&tts);
+
+        let new_delim = Rc::new(Delimited {
+            delim: token::DelimToken::Paren,
+            open_span: DUMMY_SP,
+            tts: tts,
+            close_span: DUMMY_SP,
+        });
+
+        TokenStream::from_tts(vec![TokenTree::Delimited(new_sp, new_delim)])
+    }
+
+    /// Convert an interned string into a one-element TokenStream.
+    pub fn from_interned_string_as_ident(s: InternedString) -> TokenStream {
+        TokenStream::from_tts(vec![TokenTree::Token(DUMMY_SP,
+                                                    Token::Ident(token::str_to_ident(&s[..])))])
+    }
+}
+
+/// TokenSlices are 'views' of `TokenStream's; they fit the same role as `str`s do for
+/// `String`s. In general, most TokenStream manipulations will be refocusing their internal
+/// contents by taking a TokenSlice and then using indexing and the provided operators.
+#[derive(PartialEq, Eq, Debug)]
+pub struct TokenSlice([TokenTree]);
+
+impl ops::Deref for TokenStream {
+    type Target = TokenSlice;
+
+    fn deref(&self) -> &TokenSlice {
+        let tts: &[TokenTree] = &*self.tts;
+        unsafe { mem::transmute(tts) }
+    }
+}
+
+impl TokenSlice {
+    /// Convert a borrowed TokenTree slice into a borrowed TokenSlice.
+    fn from_tts(tts: &[TokenTree]) -> &TokenSlice {
+        unsafe { mem::transmute(tts) }
+    }
+
+    /// Indicates whether the `TokenStream` is empty.
+    pub fn is_empty(&self) -> bool {
+        self.len() == 0
+    }
+
+    /// Return the `TokenSlice`'s length.
+    pub fn len(&self) -> usize {
+        self.0.len()
+    }
+
+    /// Check equality versus another TokenStream, ignoring span information.
+    pub fn eq_unspanned(&self, other: &TokenSlice) -> bool {
+        if self.len() != other.len() {
+            return false;
+        }
+        for (tt1, tt2) in self.iter().zip(other.iter()) {
+            if !tt1.eq_unspanned(tt2) {
+                return false;
+            }
+        }
+        true
+    }
+
+    /// Compute a span that covers the entire TokenSlice (eg, one wide enough to include
+    /// the entire slice). If the inputs share expansion identification, it is preserved.
+    /// If they do not, it is discarded.
+    pub fn covering_span(&self) -> Span {
+        covering_span(&self.0)
+    }
+
+    /// Indicates whether the stream is of the form `= <ts>`, where `<ts>` is a continued
+    /// `TokenStream`.
+    pub fn is_assignment(&self) -> bool {
+        self.maybe_assignment().is_some()
+    }
+
+    /// Returns the RHS of an assignment.
+    pub fn maybe_assignment(&self) -> Option<&TokenSlice> {
+        if !(self.len() > 1) {
+            return None;
+        }
+
+        Some(&self[1..])
+    }
+
+    /// Indicates whether the stream is a single, delimited expression (e.g., `(a,b,c)` or
+    /// `{a,b,c}`).
+    pub fn is_delimited(&self) -> bool {
+        self.maybe_delimited().is_some()
+    }
+
+    /// Returns the inside of the delimited term as a new TokenStream.
+    pub fn maybe_delimited(&self) -> Option<&TokenSlice> {
+        if !(self.len() == 1) {
+            return None;
+        }
+
+        match self[0] {
+            TokenTree::Delimited(_, ref rc) => Some(TokenSlice::from_tts(&*rc.tts)),
+            _ => None,
+        }
+    }
+
+    /// Returns a list of `TokenSlice`s if the stream is a delimited list, breaking the
+    /// stream on commas.
+    pub fn maybe_comma_list(&self) -> Option<Vec<&TokenSlice>> {
+        let maybe_tts = self.maybe_delimited();
+
+        let ts: &TokenSlice;
+        match maybe_tts {
+            Some(t) => {
+                ts = t;
+            }
+            None => {
+                return None;
+            }
+        }
+
+        let splits: Vec<&TokenSlice> = ts.split(|x| match *x {
+                TokenTree::Token(_, Token::Comma) => true,
+                _ => false,
+            })
+            .filter(|x| x.len() > 0)
+            .collect();
+
+        Some(splits)
+    }
+
+    /// Returns a Nonterminal if it is Interpolated.
+    pub fn maybe_interpolated_nonterminal(&self) -> Option<Nonterminal> {
+        if !(self.len() == 1) {
+            return None;
+        }
+
+        match self[0] {
+            TokenTree::Token(_, Token::Interpolated(ref nt)) => Some(nt.clone()),
+            _ => None,
+        }
+    }
+
+    /// Indicates if the stream is exactly one identifier.
+    pub fn is_ident(&self) -> bool {
+        self.maybe_ident().is_some()
+    }
+
+    /// Returns an identifier
+    pub fn maybe_ident(&self) -> Option<ast::Ident> {
+        if !(self.len() == 1) {
+            return None;
+        }
+
+        let tok = if let Some(tts) = self.maybe_delimited() {
+            if tts.len() != 1 {
+                return None;
+            }
+            &tts[0]
+        } else {
+            &self[0]
+        };
+
+        match *tok {
+            TokenTree::Token(_, Token::Ident(t)) => Some(t),
+            _ => None,
+        }
+    }
+
+    /// Indicates if the stream is exactly one literal
+    pub fn is_lit(&self) -> bool {
+        self.maybe_lit().is_some()
+    }
+
+    /// Returns a literal
+    pub fn maybe_lit(&self) -> Option<token::Lit> {
+        if !(self.len() == 1) {
+            return None;
+        }
+
+        let tok = if let Some(tts) = self.maybe_delimited() {
+            if tts.len() != 1 {
+                return None;
+            }
+            &tts[0]
+        } else {
+            &self[0]
+        };
+
+        match *tok {
+            TokenTree::Token(_, Token::Literal(l, _)) => Some(l),
+            _ => None,
+        }
+    }
+
+    /// Returns an AST string literal if the TokenStream is either a normal ('cooked') or
+    /// raw string literal.
+    pub fn maybe_str(&self) -> Option<ast::Lit> {
+        if !(self.len() == 1) {
+            return None;
+        }
+
+        match self[0] {
+            TokenTree::Token(sp, Token::Literal(Lit::Str_(s), _)) => {
+                let l = LitKind::Str(token::intern_and_get_ident(&parse::str_lit(&s.as_str())),
+                                     ast::StrStyle::Cooked);
+                Some(Spanned {
+                    node: l,
+                    span: sp,
+                })
+            }
+            TokenTree::Token(sp, Token::Literal(Lit::StrRaw(s, n), _)) => {
+                let l = LitKind::Str(token::intern_and_get_ident(&parse::raw_str_lit(&s.as_str())),
+                                     ast::StrStyle::Raw(n));
+                Some(Spanned {
+                    node: l,
+                    span: sp,
+                })
+            }
+            _ => None,
+        }
+    }
+
+    /// This operation extracts the path prefix, returning an AST path struct and the remainder
+    /// of the stream (if it finds one). To be more specific, a tokenstream that has a valid,
+    /// non-global path as a prefix (eg `foo(bar, baz)`, `foo::bar(bar)`, but *not*
+    /// `::foo::bar(baz)`) will yield the path and the remaining tokens (as a slice). The previous
+    /// examples will yield
+    /// `Some((Path { segments = vec![foo], ... }, [(bar, baz)]))`,
+    /// `Some((Path { segments = vec![foo, bar] }, [(baz)]))`,
+    /// and `None`, respectively.
+    pub fn maybe_path_prefix(&self) -> Option<(ast::Path, &TokenSlice)> {
+        let mut segments: Vec<ast::PathSegment> = Vec::new();
+
+        let path: Vec<&TokenTree> = self.iter()
+            .take_while(|x| x.is_ident() || x.eq_token(Token::ModSep))
+            .collect::<Vec<&TokenTree>>();
+
+        let path_size = path.len();
+        if path_size == 0 {
+            return None;
+        }
+
+        let cov_span = self[..path_size].covering_span();
+        let rst = &self[path_size..];
+
+        let fst_id = path[0];
+
+        if let Some(id) = fst_id.maybe_ident() {
+            segments.push(ast::PathSegment {
+                identifier: id,
+                parameters: ast::PathParameters::none(),
+            });
+        } else {
+            return None;
+        }
+
+        // Let's use a state machine to parse out the rest.
+        enum State {
+            Mod, // Expect a `::`, or return None otherwise.
+            Ident, // Expect an ident, or return None otherwise.
+        }
+        let mut state = State::Mod;
+
+        for p in &path[1..] {
+            match state {
+                State::Mod => {
+                    // State 0: ['::' -> state 1, else return None]
+                    if p.eq_token(Token::ModSep) {
+                        state = State::Ident;
+                    } else {
+                        return None;
+                    }
+                }
+                State::Ident => {
+                    // State 1: [ident -> state 0, else return None]
+                    if let Some(id) = p.maybe_ident() {
+                        segments.push(ast::PathSegment {
+                            identifier: id,
+                            parameters: ast::PathParameters::none(),
+                        });
+                        state = State::Mod;
+                    } else {
+                        return None;
+                    }
+                }
+            }
+        }
+
+        let path = ast::Path {
+            span: cov_span,
+            global: false,
+            segments: segments,
+        };
+        Some((path, rst))
+    }
+
+    /// Returns an iterator over a TokenSlice (as a sequence of TokenStreams).
+    fn iter(&self) -> Iter {
+        Iter { vs: self }
+    }
+
+    /// Splits a TokenSlice based on the provided `&TokenTree -> bool` predicate.
+    fn split<P>(&self, pred: P) -> Split<P>
+        where P: FnMut(&TokenTree) -> bool
+    {
+        Split {
+            vs: self,
+            pred: pred,
+            finished: false,
+        }
+    }
+}
+
+pub struct Iter<'a> {
+    vs: &'a TokenSlice,
+}
+
+impl<'a> Iterator for Iter<'a> {
+    type Item = &'a TokenTree;
+
+    fn next(&mut self) -> Option<&'a TokenTree> {
+        if self.vs.is_empty() {
+            return None;
+        }
+
+        let ret = Some(&self.vs[0]);
+        self.vs = &self.vs[1..];
+        ret
+    }
+}
+
+pub struct Split<'a, P>
+    where P: FnMut(&TokenTree) -> bool
+{
+    vs: &'a TokenSlice,
+    pred: P,
+    finished: bool,
+}
+
+impl<'a, P> Iterator for Split<'a, P>
+    where P: FnMut(&TokenTree) -> bool
+{
+    type Item = &'a TokenSlice;
+
+    fn next(&mut self) -> Option<&'a TokenSlice> {
+        if self.finished {
+            return None;
+        }
+
+        match self.vs.iter().position(|x| (self.pred)(x)) {
+            None => {
+                self.finished = true;
+                Some(&self.vs[..])
+            }
+            Some(idx) => {
+                let ret = Some(&self.vs[..idx]);
+                self.vs = &self.vs[idx + 1..];
+                ret
+            }
+        }
+    }
+}
+
+impl Index<usize> for TokenStream {
+    type Output = TokenTree;
+
+    fn index(&self, index: usize) -> &TokenTree {
+        Index::index(&**self, index)
+    }
+}
+
+impl ops::Index<ops::Range<usize>> for TokenStream {
+    type Output = TokenSlice;
+
+    fn index(&self, index: ops::Range<usize>) -> &TokenSlice {
+        Index::index(&**self, index)
+    }
+}
+
+impl ops::Index<ops::RangeTo<usize>> for TokenStream {
+    type Output = TokenSlice;
+
+    fn index(&self, index: ops::RangeTo<usize>) -> &TokenSlice {
+        Index::index(&**self, index)
+    }
+}
+
+impl ops::Index<ops::RangeFrom<usize>> for TokenStream {
+    type Output = TokenSlice;
+
+    fn index(&self, index: ops::RangeFrom<usize>) -> &TokenSlice {
+        Index::index(&**self, index)
+    }
+}
+
+impl ops::Index<ops::RangeFull> for TokenStream {
+    type Output = TokenSlice;
+
+    fn index(&self, _index: ops::RangeFull) -> &TokenSlice {
+        Index::index(&**self, _index)
+    }
+}
+
+impl Index<usize> for TokenSlice {
+    type Output = TokenTree;
+
+    fn index(&self, index: usize) -> &TokenTree {
+        &self.0[index]
+    }
+}
+
+impl ops::Index<ops::Range<usize>> for TokenSlice {
+    type Output = TokenSlice;
+
+    fn index(&self, index: ops::Range<usize>) -> &TokenSlice {
+        TokenSlice::from_tts(&self.0[index])
+    }
+}
+
+impl ops::Index<ops::RangeTo<usize>> for TokenSlice {
+    type Output = TokenSlice;
+
+    fn index(&self, index: ops::RangeTo<usize>) -> &TokenSlice {
+        TokenSlice::from_tts(&self.0[index])
+    }
+}
+
+impl ops::Index<ops::RangeFrom<usize>> for TokenSlice {
+    type Output = TokenSlice;
+
+    fn index(&self, index: ops::RangeFrom<usize>) -> &TokenSlice {
+        TokenSlice::from_tts(&self.0[index])
+    }
+}
+
+impl ops::Index<ops::RangeFull> for TokenSlice {
+    type Output = TokenSlice;
+
+    fn index(&self, _index: ops::RangeFull) -> &TokenSlice {
+        TokenSlice::from_tts(&self.0[_index])
+    }
+}
+
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use ast;
+    use syntax_pos::{Span, BytePos, NO_EXPANSION, DUMMY_SP};
+    use parse::token::{self, str_to_ident, Token, Lit};
+    use util::parser_testing::string_to_tts;
+    use std::rc::Rc;
+
+    fn sp(a: u32, b: u32) -> Span {
+        Span {
+            lo: BytePos(a),
+            hi: BytePos(b),
+            expn_id: NO_EXPANSION,
+        }
+    }
+
+    #[test]
+    fn test_is_empty() {
+        let test0 = TokenStream::from_tts(Vec::new());
+        let test1 = TokenStream::from_tts(vec![TokenTree::Token(sp(0, 1),
+                                                                Token::Ident(str_to_ident("a")))]);
+        let test2 = TokenStream::from_tts(string_to_tts("foo(bar::baz)".to_string()));
+
+        assert_eq!(test0.is_empty(), true);
+        assert_eq!(test1.is_empty(), false);
+        assert_eq!(test2.is_empty(), false);
+    }
+
+    #[test]
+    fn test_is_delimited() {
+        let test0 = TokenStream::from_tts(string_to_tts("foo(bar::baz)".to_string()));
+        let test1 = TokenStream::from_tts(string_to_tts("(bar::baz)".to_string()));
+        let test2 = TokenStream::from_tts(string_to_tts("(foo,bar,baz)".to_string()));
+        let test3 = TokenStream::from_tts(string_to_tts("(foo,bar,baz)(zab,rab,oof)".to_string()));
+        let test4 = TokenStream::from_tts(string_to_tts("(foo,bar,baz)foo".to_string()));
+        let test5 = TokenStream::from_tts(string_to_tts("".to_string()));
+
+        assert_eq!(test0.is_delimited(), false);
+        assert_eq!(test1.is_delimited(), true);
+        assert_eq!(test2.is_delimited(), true);
+        assert_eq!(test3.is_delimited(), false);
+        assert_eq!(test4.is_delimited(), false);
+        assert_eq!(test5.is_delimited(), false);
+    }
+
+    #[test]
+    fn test_is_assign() {
+        let test0 = TokenStream::from_tts(string_to_tts("= bar::baz".to_string()));
+        let test1 = TokenStream::from_tts(string_to_tts("= \"5\"".to_string()));
+        let test2 = TokenStream::from_tts(string_to_tts("= 5".to_string()));
+        let test3 = TokenStream::from_tts(string_to_tts("(foo = 10)".to_string()));
+        let test4 = TokenStream::from_tts(string_to_tts("= (foo,bar,baz)".to_string()));
+        let test5 = TokenStream::from_tts(string_to_tts("".to_string()));
+
+        assert_eq!(test0.is_assignment(), true);
+        assert_eq!(test1.is_assignment(), true);
+        assert_eq!(test2.is_assignment(), true);
+        assert_eq!(test3.is_assignment(), false);
+        assert_eq!(test4.is_assignment(), true);
+        assert_eq!(test5.is_assignment(), false);
+    }
+
+    #[test]
+    fn test_is_lit() {
+        let test0 = TokenStream::from_tts(string_to_tts("\"foo\"".to_string()));
+        let test1 = TokenStream::from_tts(string_to_tts("5".to_string()));
+        let test2 = TokenStream::from_tts(string_to_tts("foo".to_string()));
+        let test3 = TokenStream::from_tts(string_to_tts("foo::bar".to_string()));
+        let test4 = TokenStream::from_tts(string_to_tts("foo(bar)".to_string()));
+
+        assert_eq!(test0.is_lit(), true);
+        assert_eq!(test1.is_lit(), true);
+        assert_eq!(test2.is_lit(), false);
+        assert_eq!(test3.is_lit(), false);
+        assert_eq!(test4.is_lit(), false);
+    }
+
+    #[test]
+    fn test_is_ident() {
+        let test0 = TokenStream::from_tts(string_to_tts("\"foo\"".to_string()));
+        let test1 = TokenStream::from_tts(string_to_tts("5".to_string()));
+        let test2 = TokenStream::from_tts(string_to_tts("foo".to_string()));
+        let test3 = TokenStream::from_tts(string_to_tts("foo::bar".to_string()));
+        let test4 = TokenStream::from_tts(string_to_tts("foo(bar)".to_string()));
+
+        assert_eq!(test0.is_ident(), false);
+        assert_eq!(test1.is_ident(), false);
+        assert_eq!(test2.is_ident(), true);
+        assert_eq!(test3.is_ident(), false);
+        assert_eq!(test4.is_ident(), false);
+    }
+
+    #[test]
+    fn test_maybe_assignment() {
+        let test0_input = TokenStream::from_tts(string_to_tts("= bar::baz".to_string()));
+        let test1_input = TokenStream::from_tts(string_to_tts("= \"5\"".to_string()));
+        let test2_input = TokenStream::from_tts(string_to_tts("= 5".to_string()));
+        let test3_input = TokenStream::from_tts(string_to_tts("(foo = 10)".to_string()));
+        let test4_input = TokenStream::from_tts(string_to_tts("= (foo,bar,baz)".to_string()));
+        let test5_input = TokenStream::from_tts(string_to_tts("".to_string()));
+
+        let test0 = test0_input.maybe_assignment();
+        let test1 = test1_input.maybe_assignment();
+        let test2 = test2_input.maybe_assignment();
+        let test3 = test3_input.maybe_assignment();
+        let test4 = test4_input.maybe_assignment();
+        let test5 = test5_input.maybe_assignment();
+
+        let test0_expected = TokenStream::from_tts(vec![TokenTree::Token(sp(2, 5),
+                                                        token::Ident(str_to_ident("bar"))),
+                                       TokenTree::Token(sp(5, 7), token::ModSep),
+                                       TokenTree::Token(sp(7, 10),
+                                                        token::Ident(str_to_ident("baz")))]);
+        assert_eq!(test0, Some(&test0_expected[..]));
+
+        let test1_expected = TokenStream::from_tts(vec![TokenTree::Token(sp(2, 5),
+                                            token::Literal(Lit::Str_(token::intern("5")), None))]);
+        assert_eq!(test1, Some(&test1_expected[..]));
+
+        let test2_expected = TokenStream::from_tts(vec![TokenTree::Token( sp(2,3)
+                                       , token::Literal(
+                                           Lit::Integer(
+                                             token::intern(&(5.to_string()))),
+                                             None))]);
+        assert_eq!(test2, Some(&test2_expected[..]));
+
+        assert_eq!(test3, None);
+
+
+        let test4_tts = vec![TokenTree::Token(sp(3, 6), token::Ident(str_to_ident("foo"))),
+                             TokenTree::Token(sp(6, 7), token::Comma),
+                             TokenTree::Token(sp(7, 10), token::Ident(str_to_ident("bar"))),
+                             TokenTree::Token(sp(10, 11), token::Comma),
+                             TokenTree::Token(sp(11, 14), token::Ident(str_to_ident("baz")))];
+
+        let test4_expected = TokenStream::from_tts(vec![TokenTree::Delimited(sp(2, 15),
+                                                Rc::new(Delimited {
+                                                    delim: token::DelimToken::Paren,
+                                                    open_span: sp(2, 3),
+                                                    tts: test4_tts,
+                                                    close_span: sp(14, 15),
+                                                }))]);
+        assert_eq!(test4, Some(&test4_expected[..]));
+
+        assert_eq!(test5, None);
+
+    }
+
+    #[test]
+    fn test_maybe_delimited() {
+        let test0_input = TokenStream::from_tts(string_to_tts("foo(bar::baz)".to_string()));
+        let test1_input = TokenStream::from_tts(string_to_tts("(bar::baz)".to_string()));
+        let test2_input = TokenStream::from_tts(string_to_tts("(foo,bar,baz)".to_string()));
+        let test3_input = TokenStream::from_tts(string_to_tts("(foo,bar,baz)(zab,rab)"
+            .to_string()));
+        let test4_input = TokenStream::from_tts(string_to_tts("(foo,bar,baz)foo".to_string()));
+        let test5_input = TokenStream::from_tts(string_to_tts("".to_string()));
+
+        let test0 = test0_input.maybe_delimited();
+        let test1 = test1_input.maybe_delimited();
+        let test2 = test2_input.maybe_delimited();
+        let test3 = test3_input.maybe_delimited();
+        let test4 = test4_input.maybe_delimited();
+        let test5 = test5_input.maybe_delimited();
+
+        assert_eq!(test0, None);
+
+        let test1_expected = TokenStream::from_tts(vec![TokenTree::Token(sp(1, 4),
+                                                        token::Ident(str_to_ident("bar"))),
+                                       TokenTree::Token(sp(4, 6), token::ModSep),
+                                       TokenTree::Token(sp(6, 9),
+                                                        token::Ident(str_to_ident("baz")))]);
+        assert_eq!(test1, Some(&test1_expected[..]));
+
+        let test2_expected = TokenStream::from_tts(vec![TokenTree::Token(sp(1, 4),
+                                                        token::Ident(str_to_ident("foo"))),
+                                       TokenTree::Token(sp(4, 5), token::Comma),
+                                       TokenTree::Token(sp(5, 8),
+                                                        token::Ident(str_to_ident("bar"))),
+                                       TokenTree::Token(sp(8, 9), token::Comma),
+                                       TokenTree::Token(sp(9, 12),
+                                                        token::Ident(str_to_ident("baz")))]);
+        assert_eq!(test2, Some(&test2_expected[..]));
+
+        assert_eq!(test3, None);
+
+        assert_eq!(test4, None);
+
+        assert_eq!(test5, None);
+    }
+
+    #[test]
+    fn test_maybe_comma_list() {
+        let test0_input = TokenStream::from_tts(string_to_tts("foo(bar::baz)".to_string()));
+        let test1_input = TokenStream::from_tts(string_to_tts("(bar::baz)".to_string()));
+        let test2_input = TokenStream::from_tts(string_to_tts("(foo,bar,baz)".to_string()));
+        let test3_input = TokenStream::from_tts(string_to_tts("(foo::bar,bar,baz)".to_string()));
+        let test4_input = TokenStream::from_tts(string_to_tts("(foo,bar,baz)(zab,rab)"
+            .to_string()));
+        let test5_input = TokenStream::from_tts(string_to_tts("(foo,bar,baz)foo".to_string()));
+        let test6_input = TokenStream::from_tts(string_to_tts("".to_string()));
+        // The following is supported behavior!
+        let test7_input = TokenStream::from_tts(string_to_tts("(foo,bar,)".to_string()));
+
+        let test0 = test0_input.maybe_comma_list();
+        let test1 = test1_input.maybe_comma_list();
+        let test2 = test2_input.maybe_comma_list();
+        let test3 = test3_input.maybe_comma_list();
+        let test4 = test4_input.maybe_comma_list();
+        let test5 = test5_input.maybe_comma_list();
+        let test6 = test6_input.maybe_comma_list();
+        let test7 = test7_input.maybe_comma_list();
+
+        assert_eq!(test0, None);
+
+        let test1_stream = TokenStream::from_tts(vec![TokenTree::Token(sp(1, 4),
+                                                        token::Ident(str_to_ident("bar"))),
+                                       TokenTree::Token(sp(4, 6), token::ModSep),
+                                       TokenTree::Token(sp(6, 9),
+                                                        token::Ident(str_to_ident("baz")))]);
+
+        let test1_expected: Vec<&TokenSlice> = vec![&test1_stream[..]];
+        assert_eq!(test1, Some(test1_expected));
+
+        let test2_foo = TokenStream::from_tts(vec![TokenTree::Token(sp(1, 4),
+                                                        token::Ident(str_to_ident("foo")))]);
+        let test2_bar = TokenStream::from_tts(vec![TokenTree::Token(sp(5, 8),
+                                                        token::Ident(str_to_ident("bar")))]);
+        let test2_baz = TokenStream::from_tts(vec![TokenTree::Token(sp(9, 12),
+                                                        token::Ident(str_to_ident("baz")))]);
+        let test2_expected: Vec<&TokenSlice> = vec![&test2_foo[..], &test2_bar[..], &test2_baz[..]];
+        assert_eq!(test2, Some(test2_expected));
+
+        let test3_path = TokenStream::from_tts(vec![TokenTree::Token(sp(1, 4),
+                                                        token::Ident(str_to_ident("foo"))),
+                                       TokenTree::Token(sp(4, 6), token::ModSep),
+                                       TokenTree::Token(sp(6, 9),
+                                                        token::Ident(str_to_ident("bar")))]);
+        let test3_bar = TokenStream::from_tts(vec![TokenTree::Token(sp(10, 13),
+                                                        token::Ident(str_to_ident("bar")))]);
+        let test3_baz = TokenStream::from_tts(vec![TokenTree::Token(sp(14, 17),
+                                                        token::Ident(str_to_ident("baz")))]);
+        let test3_expected: Vec<&TokenSlice> =
+            vec![&test3_path[..], &test3_bar[..], &test3_baz[..]];
+        assert_eq!(test3, Some(test3_expected));
+
+        assert_eq!(test4, None);
+
+        assert_eq!(test5, None);
+
+        assert_eq!(test6, None);
+
+
+        let test7_expected: Vec<&TokenSlice> = vec![&test2_foo[..], &test2_bar[..]];
+        assert_eq!(test7, Some(test7_expected));
+    }
+
+    // pub fn maybe_ident(&self) -> Option<ast::Ident>
+    #[test]
+    fn test_maybe_ident() {
+        let test0 = TokenStream::from_tts(string_to_tts("\"foo\"".to_string())).maybe_ident();
+        let test1 = TokenStream::from_tts(string_to_tts("5".to_string())).maybe_ident();
+        let test2 = TokenStream::from_tts(string_to_tts("foo".to_string())).maybe_ident();
+        let test3 = TokenStream::from_tts(string_to_tts("foo::bar".to_string())).maybe_ident();
+        let test4 = TokenStream::from_tts(string_to_tts("foo(bar)".to_string())).maybe_ident();
+
+        assert_eq!(test0, None);
+        assert_eq!(test1, None);
+        assert_eq!(test2, Some(str_to_ident("foo")));
+        assert_eq!(test3, None);
+        assert_eq!(test4, None);
+    }
+
+    // pub fn maybe_lit(&self) -> Option<token::Lit>
+    #[test]
+    fn test_maybe_lit() {
+        let test0 = TokenStream::from_tts(string_to_tts("\"foo\"".to_string())).maybe_lit();
+        let test1 = TokenStream::from_tts(string_to_tts("5".to_string())).maybe_lit();
+        let test2 = TokenStream::from_tts(string_to_tts("foo".to_string())).maybe_lit();
+        let test3 = TokenStream::from_tts(string_to_tts("foo::bar".to_string())).maybe_lit();
+        let test4 = TokenStream::from_tts(string_to_tts("foo(bar)".to_string())).maybe_lit();
+
+        assert_eq!(test0, Some(Lit::Str_(token::intern("foo"))));
+        assert_eq!(test1, Some(Lit::Integer(token::intern(&(5.to_string())))));
+        assert_eq!(test2, None);
+        assert_eq!(test3, None);
+        assert_eq!(test4, None);
+    }
+
+    #[test]
+    fn test_maybe_path_prefix() {
+        let test0_input = TokenStream::from_tts(string_to_tts("foo(bar::baz)".to_string()));
+        let test1_input = TokenStream::from_tts(string_to_tts("(bar::baz)".to_string()));
+        let test2_input = TokenStream::from_tts(string_to_tts("(foo,bar,baz)".to_string()));
+        let test3_input = TokenStream::from_tts(string_to_tts("foo::bar(bar,baz)".to_string()));
+
+        let test0 = test0_input.maybe_path_prefix();
+        let test1 = test1_input.maybe_path_prefix();
+        let test2 = test2_input.maybe_path_prefix();
+        let test3 = test3_input.maybe_path_prefix();
+
+        let test0_tts = vec![TokenTree::Token(sp(4, 7), token::Ident(str_to_ident("bar"))),
+                             TokenTree::Token(sp(7, 9), token::ModSep),
+                             TokenTree::Token(sp(9, 12), token::Ident(str_to_ident("baz")))];
+
+        let test0_stream = TokenStream::from_tts(vec![TokenTree::Delimited(sp(3, 13),
+                                                               Rc::new(Delimited {
+                                                                   delim: token::DelimToken::Paren,
+                                                                   open_span: sp(3, 4),
+                                                                   tts: test0_tts,
+                                                                   close_span: sp(12, 13),
+                                                               }))]);
+
+        let test0_expected = Some((ast::Path::from_ident(sp(0, 3), str_to_ident("foo")),
+                                   &test0_stream[..]));
+        assert_eq!(test0, test0_expected);
+
+        assert_eq!(test1, None);
+        assert_eq!(test2, None);
+
+        let test3_path = ast::Path {
+            span: sp(0, 8),
+            global: false,
+            segments: vec![ast::PathSegment {
+                               identifier: str_to_ident("foo"),
+                               parameters: ast::PathParameters::none(),
+                           },
+                           ast::PathSegment {
+                               identifier: str_to_ident("bar"),
+                               parameters: ast::PathParameters::none(),
+                           }],
+        };
+
+        let test3_tts = vec![TokenTree::Token(sp(9, 12), token::Ident(str_to_ident("bar"))),
+                             TokenTree::Token(sp(12, 13), token::Comma),
+                             TokenTree::Token(sp(13, 16), token::Ident(str_to_ident("baz")))];
+
+        let test3_stream = TokenStream::from_tts(vec![TokenTree::Delimited(sp(8, 17),
+                                                               Rc::new(Delimited {
+                                                                   delim: token::DelimToken::Paren,
+                                                                   open_span: sp(8, 9),
+                                                                   tts: test3_tts,
+                                                                   close_span: sp(16, 17),
+                                                               }))]);
+        let test3_expected = Some((test3_path, &test3_stream[..]));
+        assert_eq!(test3, test3_expected);
+    }
+
+    #[test]
+    fn test_as_paren_delimited_stream() {
+        let test0 = TokenStream::as_paren_delimited_stream(string_to_tts("foo,bar,".to_string()));
+        let test1 = TokenStream::as_paren_delimited_stream(string_to_tts("baz(foo,bar)"
+            .to_string()));
+
+        let test0_tts = vec![TokenTree::Token(sp(0, 3), token::Ident(str_to_ident("foo"))),
+                             TokenTree::Token(sp(3, 4), token::Comma),
+                             TokenTree::Token(sp(4, 7), token::Ident(str_to_ident("bar"))),
+                             TokenTree::Token(sp(7, 8), token::Comma)];
+        let test0_stream = TokenStream::from_tts(vec![TokenTree::Delimited(sp(0, 8),
+                                                               Rc::new(Delimited {
+                                                                   delim: token::DelimToken::Paren,
+                                                                   open_span: DUMMY_SP,
+                                                                   tts: test0_tts,
+                                                                   close_span: DUMMY_SP,
+                                                               }))]);
+
+        assert_eq!(test0, test0_stream);
+
+
+        let test1_tts = vec![TokenTree::Token(sp(4, 7), token::Ident(str_to_ident("foo"))),
+                             TokenTree::Token(sp(7, 8), token::Comma),
+                             TokenTree::Token(sp(8, 11), token::Ident(str_to_ident("bar")))];
+
+        let test1_parse = vec![TokenTree::Token(sp(0, 3), token::Ident(str_to_ident("baz"))),
+                               TokenTree::Delimited(sp(3, 12),
+                                                    Rc::new(Delimited {
+                                                        delim: token::DelimToken::Paren,
+                                                        open_span: sp(3, 4),
+                                                        tts: test1_tts,
+                                                        close_span: sp(11, 12),
+                                                    }))];
+
+        let test1_stream = TokenStream::from_tts(vec![TokenTree::Delimited(sp(0, 12),
+                                                               Rc::new(Delimited {
+                                                                   delim: token::DelimToken::Paren,
+                                                                   open_span: DUMMY_SP,
+                                                                   tts: test1_parse,
+                                                                   close_span: DUMMY_SP,
+                                                               }))]);
+
+        assert_eq!(test1, test1_stream);
+    }
+
+}
index 56a8c28ffedc2fe6547bca57842d320da96d388a..9cf456062385fcce660b6595b30e20594eaa5e20 100644 (file)
@@ -192,7 +192,10 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[tokenstream::Token
 
                     if OPTIONS.iter().any(|&opt| s == opt) {
                         cx.span_warn(p.last_span, "expected a clobber, found an option");
+                    } else if s.starts_with("{") || s.ends_with("}") {
+                        cx.span_err(p.last_span, "clobber should not be surrounded by braces");
                     }
+
                     clobs.push(s);
                 }
             }
index 80ad955b60b3ac02d0462a4a65fcea597d0ebfb1..7ca76af03bb04659562890d6b4f223fffe0d748f 160000 (submodule)
--- a/src/llvm
+++ b/src/llvm
@@ -1 +1 @@
-Subproject commit 80ad955b60b3ac02d0462a4a65fcea597d0ebfb1
+Subproject commit 7ca76af03bb04659562890d6b4f223fffe0d748f
index 4017c3856c465a77672945d4f81420dd80f53ad9..1953fc5a6b48c3c9008973e89e61f095263bd486 100644 (file)
@@ -1,4 +1,4 @@
 # If this file is modified, then llvm will be forcibly cleaned and then rebuilt.
 # The actual contents of this file do not matter, but to trigger a change on the
 # build bots then the contents should be changed so git updates the mtime.
-2016-04-28
+2016-06-23
index 546bb235a5f501c4b67f68d2482bf1a899d361ae..2b5ac7e8d80dec5efd06e5c570a74780f4216305 100644 (file)
@@ -8,6 +8,11 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
+// In the current version of the collector that still has to support
+// legacy-trans, closures do not generate their own TransItems, so we are
+// ignoring this test until MIR trans has taken over completely
+// ignore-test
+
 // ignore-tidy-linelength
 // compile-flags:-Zprint-trans-items=eager
 
index ba77266d07248213059a742f7c85efe75c801362..278e9189dd6a72b8cc2ffdb3dfa6def8557111e9 100644 (file)
@@ -8,6 +8,11 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
+// In the current version of the collector that still has to support
+// legacy-trans, closures do not generate their own TransItems, so we are
+// ignoring this test until MIR trans has taken over completely
+// ignore-test
+
 // ignore-tidy-linelength
 // compile-flags:-Zprint-trans-items=eager
 
index 5262d31ae0dcabbbd4db466d61f692b73b48a860..910ffd2959ed089400ec7691b95710e808db1631 100644 (file)
 // aux-build:cgu_extern_drop_glue.rs
 extern crate cgu_extern_drop_glue;
 
-//~ TRANS_ITEM drop-glue cgu_extern_drop_glue::Struct[0] @@ extern_drop_glue[OnceODR] extern_drop_glue-mod1[OnceODR]
-//~ TRANS_ITEM drop-glue-contents cgu_extern_drop_glue::Struct[0] @@ extern_drop_glue[OnceODR] extern_drop_glue-mod1[OnceODR]
+//~ TRANS_ITEM drop-glue cgu_extern_drop_glue::Struct[0] @@ extern_drop_glue[Internal] extern_drop_glue-mod1[Internal]
+//~ TRANS_ITEM drop-glue-contents cgu_extern_drop_glue::Struct[0] @@ extern_drop_glue[Internal] extern_drop_glue-mod1[Internal]
 
 struct LocalStruct(cgu_extern_drop_glue::Struct);
 
-//~ TRANS_ITEM fn extern_drop_glue::user[0] @@ extern_drop_glue[WeakODR]
+//~ TRANS_ITEM fn extern_drop_glue::user[0] @@ extern_drop_glue[External]
 fn user()
 {
-    //~ TRANS_ITEM drop-glue extern_drop_glue::LocalStruct[0] @@ extern_drop_glue[OnceODR]
+    //~ TRANS_ITEM drop-glue extern_drop_glue::LocalStruct[0] @@ extern_drop_glue[Internal]
     let _ = LocalStruct(cgu_extern_drop_glue::Struct(0));
 }
 
@@ -37,10 +37,10 @@ mod mod1 {
 
     struct LocalStruct(cgu_extern_drop_glue::Struct);
 
-    //~ TRANS_ITEM fn extern_drop_glue::mod1[0]::user[0] @@ extern_drop_glue-mod1[WeakODR]
+    //~ TRANS_ITEM fn extern_drop_glue::mod1[0]::user[0] @@ extern_drop_glue-mod1[External]
     fn user()
     {
-        //~ TRANS_ITEM drop-glue extern_drop_glue::mod1[0]::LocalStruct[0] @@ extern_drop_glue-mod1[OnceODR]
+        //~ TRANS_ITEM drop-glue extern_drop_glue::mod1[0]::LocalStruct[0] @@ extern_drop_glue-mod1[Internal]
         let _ = LocalStruct(cgu_extern_drop_glue::Struct(0));
     }
 }
index 6beed231df993704c3472b8e72c7e47030db173b..58f904f48a17dad25d5d4625182610bec2fbffce 100644 (file)
@@ -19,7 +19,7 @@
 // aux-build:cgu_generic_function.rs
 extern crate cgu_generic_function;
 
-//~ TRANS_ITEM fn extern_generic::user[0] @@ extern_generic[WeakODR]
+//~ TRANS_ITEM fn extern_generic::user[0] @@ extern_generic[External]
 fn user() {
     let _ = cgu_generic_function::foo("abc");
 }
@@ -27,7 +27,7 @@ fn user() {
 mod mod1 {
     use cgu_generic_function;
 
-    //~ TRANS_ITEM fn extern_generic::mod1[0]::user[0] @@ extern_generic-mod1[WeakODR]
+    //~ TRANS_ITEM fn extern_generic::mod1[0]::user[0] @@ extern_generic-mod1[External]
     fn user() {
         let _ = cgu_generic_function::foo("abc");
     }
@@ -35,7 +35,7 @@ fn user() {
     mod mod1 {
         use cgu_generic_function;
 
-        //~ TRANS_ITEM fn extern_generic::mod1[0]::mod1[0]::user[0] @@ extern_generic-mod1-mod1[WeakODR]
+        //~ TRANS_ITEM fn extern_generic::mod1[0]::mod1[0]::user[0] @@ extern_generic-mod1-mod1[External]
         fn user() {
             let _ = cgu_generic_function::foo("abc");
         }
@@ -45,20 +45,20 @@ fn user() {
 mod mod2 {
     use cgu_generic_function;
 
-    //~ TRANS_ITEM fn extern_generic::mod2[0]::user[0] @@ extern_generic-mod2[WeakODR]
+    //~ TRANS_ITEM fn extern_generic::mod2[0]::user[0] @@ extern_generic-mod2[External]
     fn user() {
         let _ = cgu_generic_function::foo("abc");
     }
 }
 
 mod mod3 {
-    //~ TRANS_ITEM fn extern_generic::mod3[0]::non_user[0] @@ extern_generic-mod3[WeakODR]
+    //~ TRANS_ITEM fn extern_generic::mod3[0]::non_user[0] @@ extern_generic-mod3[External]
     fn non_user() {}
 }
 
 // Make sure the two generic functions from the extern crate get instantiated
 // privately in every module they are use in.
-//~ TRANS_ITEM fn cgu_generic_function::foo[0]<&str> @@ extern_generic[OnceODR] extern_generic-mod1[OnceODR] extern_generic-mod2[OnceODR] extern_generic-mod1-mod1[OnceODR]
-//~ TRANS_ITEM fn cgu_generic_function::bar[0]<&str> @@ extern_generic[OnceODR] extern_generic-mod1[OnceODR] extern_generic-mod2[OnceODR] extern_generic-mod1-mod1[OnceODR]
+//~ TRANS_ITEM fn cgu_generic_function::foo[0]<&str> @@ extern_generic[Internal] extern_generic-mod1[Internal] extern_generic-mod2[Internal] extern_generic-mod1-mod1[Internal]
+//~ TRANS_ITEM fn cgu_generic_function::bar[0]<&str> @@ extern_generic[Internal] extern_generic-mod1[Internal] extern_generic-mod2[Internal] extern_generic-mod1-mod1[Internal]
 
 //~ TRANS_ITEM drop-glue i8
index 967824f31d0456279a5396cded3edfafa7926741..118513f65541b30e046a4c8a882b368e28eb19e3 100644 (file)
 // This test makes sure that items inlined from external crates are privately
 // instantiated in every codegen unit they are used in.
 
-//~ TRANS_ITEM fn cgu_explicit_inlining::inlined[0] @@ inlining_from_extern_crate[OnceODR] inlining_from_extern_crate-mod1[OnceODR]
-//~ TRANS_ITEM fn cgu_explicit_inlining::always_inlined[0] @@ inlining_from_extern_crate[OnceODR] inlining_from_extern_crate-mod2[OnceODR]
+//~ TRANS_ITEM fn cgu_explicit_inlining::inlined[0] @@ inlining_from_extern_crate[Internal] inlining_from_extern_crate-mod1[Internal]
+//~ TRANS_ITEM fn cgu_explicit_inlining::always_inlined[0] @@ inlining_from_extern_crate[Internal] inlining_from_extern_crate-mod2[Internal]
 
-//~ TRANS_ITEM fn inlining_from_extern_crate::user[0] @@ inlining_from_extern_crate[WeakODR]
+//~ TRANS_ITEM fn inlining_from_extern_crate::user[0] @@ inlining_from_extern_crate[External]
 pub fn user()
 {
     cgu_explicit_inlining::inlined();
@@ -37,7 +37,7 @@ pub fn user()
 mod mod1 {
     use cgu_explicit_inlining;
 
-    //~ TRANS_ITEM fn inlining_from_extern_crate::mod1[0]::user[0] @@ inlining_from_extern_crate-mod1[WeakODR]
+    //~ TRANS_ITEM fn inlining_from_extern_crate::mod1[0]::user[0] @@ inlining_from_extern_crate-mod1[External]
     pub fn user()
     {
         cgu_explicit_inlining::inlined();
@@ -50,7 +50,7 @@ pub fn user()
 mod mod2 {
     use cgu_explicit_inlining;
 
-    //~ TRANS_ITEM fn inlining_from_extern_crate::mod2[0]::user[0] @@ inlining_from_extern_crate-mod2[WeakODR]
+    //~ TRANS_ITEM fn inlining_from_extern_crate::mod2[0]::user[0] @@ inlining_from_extern_crate-mod2[External]
     pub fn user()
     {
         cgu_explicit_inlining::always_inlined();
index 04ebef645ec9836ec648b279ec1548f30128b070..f61e3fe12931e0ddb6041ca5d240f3f804b01236 100644 (file)
 #![allow(dead_code)]
 #![crate_type="lib"]
 
-//~ TRANS_ITEM drop-glue local_drop_glue::Struct[0] @@ local_drop_glue[OnceODR] local_drop_glue-mod1[OnceODR]
-//~ TRANS_ITEM drop-glue-contents local_drop_glue::Struct[0] @@ local_drop_glue[OnceODR] local_drop_glue-mod1[OnceODR]
+//~ TRANS_ITEM drop-glue local_drop_glue::Struct[0] @@ local_drop_glue[Internal] local_drop_glue-mod1[Internal]
+//~ TRANS_ITEM drop-glue-contents local_drop_glue::Struct[0] @@ local_drop_glue[Internal] local_drop_glue-mod1[Internal]
 struct Struct {
     _a: u32
 }
 
 impl Drop for Struct {
-    //~ TRANS_ITEM fn local_drop_glue::{{impl}}[0]::drop[0] @@ local_drop_glue[WeakODR]
+    //~ TRANS_ITEM fn local_drop_glue::{{impl}}[0]::drop[0] @@ local_drop_glue[External]
     fn drop(&mut self) {}
 }
 
-//~ TRANS_ITEM drop-glue local_drop_glue::Outer[0] @@ local_drop_glue[OnceODR]
+//~ TRANS_ITEM drop-glue local_drop_glue::Outer[0] @@ local_drop_glue[Internal]
 struct Outer {
     _a: Struct
 }
 
-//~ TRANS_ITEM fn local_drop_glue::user[0] @@ local_drop_glue[WeakODR]
+//~ TRANS_ITEM fn local_drop_glue::user[0] @@ local_drop_glue[External]
 fn user()
 {
     let _ = Outer {
@@ -46,14 +46,14 @@ mod mod1
 {
     use super::Struct;
 
-    //~ TRANS_ITEM drop-glue local_drop_glue::mod1[0]::Struct2[0] @@ local_drop_glue-mod1[OnceODR]
+    //~ TRANS_ITEM drop-glue local_drop_glue::mod1[0]::Struct2[0] @@ local_drop_glue-mod1[Internal]
     struct Struct2 {
         _a: Struct,
-        //~ TRANS_ITEM drop-glue (u32, local_drop_glue::Struct[0]) @@ local_drop_glue-mod1[OnceODR]
+        //~ TRANS_ITEM drop-glue (u32, local_drop_glue::Struct[0]) @@ local_drop_glue-mod1[Internal]
         _b: (u32, Struct),
     }
 
-    //~ TRANS_ITEM fn local_drop_glue::mod1[0]::user[0] @@ local_drop_glue-mod1[WeakODR]
+    //~ TRANS_ITEM fn local_drop_glue::mod1[0]::user[0] @@ local_drop_glue-mod1[External]
     fn user()
     {
         let _ = Struct2 {
index f5641f1f2ed7376210ca0a73e2bc34f11c3a840b..2d744169d3f8e5800a086956b8201953ba66347c 100644 (file)
 #![allow(dead_code)]
 #![crate_type="lib"]
 
-// Used in different modules/codegen units but always instantiated in the same
-// codegen unit.
-
-//~ TRANS_ITEM fn local_generic::generic[0]<u32> @@ local_generic.volatile[WeakODR]
-//~ TRANS_ITEM fn local_generic::generic[0]<u64> @@ local_generic.volatile[WeakODR]
-//~ TRANS_ITEM fn local_generic::generic[0]<char> @@ local_generic.volatile[WeakODR]
-//~ TRANS_ITEM fn local_generic::generic[0]<&str> @@ local_generic.volatile[WeakODR]
+//~ TRANS_ITEM fn local_generic::generic[0]<u32> @@ local_generic[Internal]
+//~ TRANS_ITEM fn local_generic::generic[0]<u64> @@ local_generic-mod1[Internal]
+//~ TRANS_ITEM fn local_generic::generic[0]<char> @@ local_generic-mod1-mod1[Internal]
+//~ TRANS_ITEM fn local_generic::generic[0]<&str> @@ local_generic-mod2[Internal]
 pub fn generic<T>(x: T) -> T { x }
 
-//~ TRANS_ITEM fn local_generic::user[0] @@ local_generic[WeakODR]
+//~ TRANS_ITEM fn local_generic::user[0] @@ local_generic[External]
 fn user() {
     let _ = generic(0u32);
 }
@@ -33,7 +30,7 @@ fn user() {
 mod mod1 {
     pub use super::generic;
 
-    //~ TRANS_ITEM fn local_generic::mod1[0]::user[0] @@ local_generic-mod1[WeakODR]
+    //~ TRANS_ITEM fn local_generic::mod1[0]::user[0] @@ local_generic-mod1[External]
     fn user() {
         let _ = generic(0u64);
     }
@@ -41,7 +38,7 @@ fn user() {
     mod mod1 {
         use super::generic;
 
-        //~ TRANS_ITEM fn local_generic::mod1[0]::mod1[0]::user[0] @@ local_generic-mod1-mod1[WeakODR]
+        //~ TRANS_ITEM fn local_generic::mod1[0]::mod1[0]::user[0] @@ local_generic-mod1-mod1[External]
         fn user() {
             let _ = generic('c');
         }
@@ -51,7 +48,7 @@ fn user() {
 mod mod2 {
     use super::generic;
 
-    //~ TRANS_ITEM fn local_generic::mod2[0]::user[0] @@ local_generic-mod2[WeakODR]
+    //~ TRANS_ITEM fn local_generic::mod2[0]::user[0] @@ local_generic-mod2[External]
     fn user() {
         let _ = generic("abc");
     }
index 880cc0a4fb47a003c75cbb719825687b1aab2044..5eb1cbc2199f759bccd5003c3275b0c1e7417d5c 100644 (file)
@@ -19,7 +19,7 @@
 mod inline {
 
     // Important: This function should show up in all codegen units where it is inlined
-    //~ TRANS_ITEM fn local_inlining::inline[0]::inlined_function[0] @@ local_inlining-inline[WeakODR] local_inlining-user1[Available] local_inlining-user2[Available]
+    //~ TRANS_ITEM fn local_inlining::inline[0]::inlined_function[0] @@ local_inlining-inline[External] local_inlining-user1[Available] local_inlining-user2[Available]
     #[inline(always)]
     pub fn inlined_function()
     {
@@ -30,7 +30,7 @@ pub fn inlined_function()
 mod user1 {
     use super::inline;
 
-    //~ TRANS_ITEM fn local_inlining::user1[0]::foo[0] @@ local_inlining-user1[WeakODR]
+    //~ TRANS_ITEM fn local_inlining::user1[0]::foo[0] @@ local_inlining-user1[External]
     fn foo() {
         inline::inlined_function();
     }
@@ -39,7 +39,7 @@ fn foo() {
 mod user2 {
     use super::inline;
 
-    //~ TRANS_ITEM fn local_inlining::user2[0]::bar[0] @@ local_inlining-user2[WeakODR]
+    //~ TRANS_ITEM fn local_inlining::user2[0]::bar[0] @@ local_inlining-user2[External]
     fn bar() {
         inline::inlined_function();
     }
@@ -47,7 +47,7 @@ fn bar() {
 
 mod non_user {
 
-    //~ TRANS_ITEM fn local_inlining::non_user[0]::baz[0] @@ local_inlining-non_user[WeakODR]
+    //~ TRANS_ITEM fn local_inlining::non_user[0]::baz[0] @@ local_inlining-non_user[External]
     fn baz() {
 
     }
index f3efa2587d3d5d3f7518e6294d52a0c91f97e5cb..28c4698eabd1fe636f802e828447d433cbffb067 100644 (file)
@@ -18,7 +18,7 @@
 
 mod inline {
 
-    //~ TRANS_ITEM fn local_transitive_inlining::inline[0]::inlined_function[0] @@ local_transitive_inlining-inline[WeakODR] local_transitive_inlining-direct_user[Available] local_transitive_inlining-indirect_user[Available]
+    //~ TRANS_ITEM fn local_transitive_inlining::inline[0]::inlined_function[0] @@ local_transitive_inlining-inline[External] local_transitive_inlining-direct_user[Available] local_transitive_inlining-indirect_user[Available]
     #[inline(always)]
     pub fn inlined_function()
     {
@@ -29,7 +29,7 @@ pub fn inlined_function()
 mod direct_user {
     use super::inline;
 
-    //~ TRANS_ITEM fn local_transitive_inlining::direct_user[0]::foo[0] @@ local_transitive_inlining-direct_user[WeakODR] local_transitive_inlining-indirect_user[Available]
+    //~ TRANS_ITEM fn local_transitive_inlining::direct_user[0]::foo[0] @@ local_transitive_inlining-direct_user[External] local_transitive_inlining-indirect_user[Available]
     #[inline(always)]
     pub fn foo() {
         inline::inlined_function();
@@ -39,7 +39,7 @@ pub fn foo() {
 mod indirect_user {
     use super::direct_user;
 
-    //~ TRANS_ITEM fn local_transitive_inlining::indirect_user[0]::bar[0] @@ local_transitive_inlining-indirect_user[WeakODR]
+    //~ TRANS_ITEM fn local_transitive_inlining::indirect_user[0]::bar[0] @@ local_transitive_inlining-indirect_user[External]
     fn bar() {
         direct_user::foo();
     }
@@ -47,7 +47,7 @@ fn bar() {
 
 mod non_user {
 
-    //~ TRANS_ITEM fn local_transitive_inlining::non_user[0]::baz[0] @@ local_transitive_inlining-non_user[WeakODR]
+    //~ TRANS_ITEM fn local_transitive_inlining::non_user[0]::baz[0] @@ local_transitive_inlining-non_user[External]
     fn baz() {
 
     }
index 99dda0e38bad7aeb63ac64262a91136f9f4b1fcf..1ea5aafd401d20cff73e7d507e10e0e111510e1b 100644 (file)
@@ -8,6 +8,11 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
+// Currently, all generic functions are instantiated in each codegen unit that
+// uses them, even those not marked with #[inline], so this test does not make
+// much sense at the moment.
+// ignore-test
+
 // ignore-tidy-linelength
 // We specify -Z incremental here because we want to test the partitioning for
 // incremental compilation
@@ -25,10 +30,10 @@ mod mod1 {
     // Even though the impl is in `mod1`, the methods should end up in the
     // parent module, since that is where their self-type is.
     impl SomeType {
-        //~ TRANS_ITEM fn methods_are_with_self_type::mod1[0]::{{impl}}[0]::method[0] @@ methods_are_with_self_type[WeakODR]
+        //~ TRANS_ITEM fn methods_are_with_self_type::mod1[0]::{{impl}}[0]::method[0] @@ methods_are_with_self_type[External]
         fn method(&self) {}
 
-        //~ TRANS_ITEM fn methods_are_with_self_type::mod1[0]::{{impl}}[0]::associated_fn[0] @@ methods_are_with_self_type[WeakODR]
+        //~ TRANS_ITEM fn methods_are_with_self_type::mod1[0]::{{impl}}[0]::associated_fn[0] @@ methods_are_with_self_type[External]
         fn associated_fn() {}
     }
 
index c3af86f820f18f20a3045e14bd95607188fca004..4da64110321688400ba7b734460abd4b5e344ada 100644 (file)
 #![allow(dead_code)]
 #![crate_type="lib"]
 
-//~ TRANS_ITEM fn regular_modules::foo[0] @@ regular_modules[WeakODR]
+//~ TRANS_ITEM fn regular_modules::foo[0] @@ regular_modules[External]
 fn foo() {}
 
-//~ TRANS_ITEM fn regular_modules::bar[0] @@ regular_modules[WeakODR]
+//~ TRANS_ITEM fn regular_modules::bar[0] @@ regular_modules[External]
 fn bar() {}
 
 //~ TRANS_ITEM static regular_modules::BAZ[0] @@ regular_modules[External]
@@ -27,26 +27,26 @@ fn bar() {}
 
 mod mod1 {
 
-    //~ TRANS_ITEM fn regular_modules::mod1[0]::foo[0] @@ regular_modules-mod1[WeakODR]
+    //~ TRANS_ITEM fn regular_modules::mod1[0]::foo[0] @@ regular_modules-mod1[External]
     fn foo() {}
-    //~ TRANS_ITEM fn regular_modules::mod1[0]::bar[0] @@ regular_modules-mod1[WeakODR]
+    //~ TRANS_ITEM fn regular_modules::mod1[0]::bar[0] @@ regular_modules-mod1[External]
     fn bar() {}
     //~ TRANS_ITEM static regular_modules::mod1[0]::BAZ[0] @@ regular_modules-mod1[External]
     static BAZ: u64 = 0;
 
     mod mod1 {
-        //~ TRANS_ITEM fn regular_modules::mod1[0]::mod1[0]::foo[0] @@ regular_modules-mod1-mod1[WeakODR]
+        //~ TRANS_ITEM fn regular_modules::mod1[0]::mod1[0]::foo[0] @@ regular_modules-mod1-mod1[External]
         fn foo() {}
-        //~ TRANS_ITEM fn regular_modules::mod1[0]::mod1[0]::bar[0] @@ regular_modules-mod1-mod1[WeakODR]
+        //~ TRANS_ITEM fn regular_modules::mod1[0]::mod1[0]::bar[0] @@ regular_modules-mod1-mod1[External]
         fn bar() {}
         //~ TRANS_ITEM static regular_modules::mod1[0]::mod1[0]::BAZ[0] @@ regular_modules-mod1-mod1[External]
         static BAZ: u64 = 0;
     }
 
     mod mod2 {
-        //~ TRANS_ITEM fn regular_modules::mod1[0]::mod2[0]::foo[0] @@ regular_modules-mod1-mod2[WeakODR]
+        //~ TRANS_ITEM fn regular_modules::mod1[0]::mod2[0]::foo[0] @@ regular_modules-mod1-mod2[External]
         fn foo() {}
-        //~ TRANS_ITEM fn regular_modules::mod1[0]::mod2[0]::bar[0] @@ regular_modules-mod1-mod2[WeakODR]
+        //~ TRANS_ITEM fn regular_modules::mod1[0]::mod2[0]::bar[0] @@ regular_modules-mod1-mod2[External]
         fn bar() {}
         //~ TRANS_ITEM static regular_modules::mod1[0]::mod2[0]::BAZ[0] @@ regular_modules-mod1-mod2[External]
         static BAZ: u64 = 0;
@@ -55,26 +55,26 @@ fn bar() {}
 
 mod mod2 {
 
-    //~ TRANS_ITEM fn regular_modules::mod2[0]::foo[0] @@ regular_modules-mod2[WeakODR]
+    //~ TRANS_ITEM fn regular_modules::mod2[0]::foo[0] @@ regular_modules-mod2[External]
     fn foo() {}
-    //~ TRANS_ITEM fn regular_modules::mod2[0]::bar[0] @@ regular_modules-mod2[WeakODR]
+    //~ TRANS_ITEM fn regular_modules::mod2[0]::bar[0] @@ regular_modules-mod2[External]
     fn bar() {}
     //~ TRANS_ITEM static regular_modules::mod2[0]::BAZ[0] @@ regular_modules-mod2[External]
     static BAZ: u64 = 0;
 
     mod mod1 {
-        //~ TRANS_ITEM fn regular_modules::mod2[0]::mod1[0]::foo[0] @@ regular_modules-mod2-mod1[WeakODR]
+        //~ TRANS_ITEM fn regular_modules::mod2[0]::mod1[0]::foo[0] @@ regular_modules-mod2-mod1[External]
         fn foo() {}
-        //~ TRANS_ITEM fn regular_modules::mod2[0]::mod1[0]::bar[0] @@ regular_modules-mod2-mod1[WeakODR]
+        //~ TRANS_ITEM fn regular_modules::mod2[0]::mod1[0]::bar[0] @@ regular_modules-mod2-mod1[External]
         fn bar() {}
         //~ TRANS_ITEM static regular_modules::mod2[0]::mod1[0]::BAZ[0] @@ regular_modules-mod2-mod1[External]
         static BAZ: u64 = 0;
     }
 
     mod mod2 {
-        //~ TRANS_ITEM fn regular_modules::mod2[0]::mod2[0]::foo[0] @@ regular_modules-mod2-mod2[WeakODR]
+        //~ TRANS_ITEM fn regular_modules::mod2[0]::mod2[0]::foo[0] @@ regular_modules-mod2-mod2[External]
         fn foo() {}
-        //~ TRANS_ITEM fn regular_modules::mod2[0]::mod2[0]::bar[0] @@ regular_modules-mod2-mod2[WeakODR]
+        //~ TRANS_ITEM fn regular_modules::mod2[0]::mod2[0]::bar[0] @@ regular_modules-mod2-mod2[External]
         fn bar() {}
         //~ TRANS_ITEM static regular_modules::mod2[0]::mod2[0]::BAZ[0] @@ regular_modules-mod2-mod2[External]
         static BAZ: u64 = 0;
index 9e878b95a369a043c4c2f68eeb308fa68a6fe177..ffe1ec278b8dd3e0b99075d4bf615a9acfec2b69 100644 (file)
@@ -21,7 +21,7 @@
 //~ TRANS_ITEM static statics::BAR[0] @@ statics[External]
 static BAR: u32 = 0;
 
-//~ TRANS_ITEM fn statics::function[0] @@ statics[WeakODR]
+//~ TRANS_ITEM fn statics::function[0] @@ statics[External]
 fn function() {
     //~ TRANS_ITEM static statics::function[0]::FOO[0] @@ statics[External]
     static FOO: u32 = 0;
@@ -37,7 +37,7 @@ mod mod1 {
     //~ TRANS_ITEM static statics::mod1[0]::BAR[0] @@ statics-mod1[External]
     static BAR: u32 = 0;
 
-    //~ TRANS_ITEM fn statics::mod1[0]::function[0] @@ statics-mod1[WeakODR]
+    //~ TRANS_ITEM fn statics::mod1[0]::function[0] @@ statics-mod1[External]
     fn function() {
         //~ TRANS_ITEM static statics::mod1[0]::function[0]::FOO[0] @@ statics-mod1[External]
         static FOO: u32 = 0;
index 83dd6a3b00258ec5ee4af95d2ff11877cf98cc73..25f8c130469973d888c617dca289abc639386b54 100644 (file)
@@ -31,13 +31,13 @@ pub fn droppy() {
 // that's one new drop call per call to possibly_unwinding(), and finally 3 drop calls for the
 // regular function exit. We used to have problems with quadratic growths of drop calls in such
 // functions.
-// CHECK: call{{.*}}SomeUniqueName{{.*}}drop
-// CHECK: call{{.*}}SomeUniqueName{{.*}}drop
-// CHECK: call{{.*}}SomeUniqueName{{.*}}drop
-// CHECK: call{{.*}}SomeUniqueName{{.*}}drop
-// CHECK: call{{.*}}SomeUniqueName{{.*}}drop
-// CHECK: call{{.*}}SomeUniqueName{{.*}}drop
-// CHECK-NOT: call{{.*}}SomeUniqueName{{.*}}drop
+// CHECK: call{{.*}}drop{{.*}}SomeUniqueName
+// CHECK: call{{.*}}drop{{.*}}SomeUniqueName
+// CHECK: call{{.*}}drop{{.*}}SomeUniqueName
+// CHECK: call{{.*}}drop{{.*}}SomeUniqueName
+// CHECK: call{{.*}}drop{{.*}}SomeUniqueName
+// CHECK: call{{.*}}drop{{.*}}SomeUniqueName
+// CHECK-NOT: call{{.*}}drop{{.*}}SomeUniqueName
 // The next line checks for the } that ends the function definition
 // CHECK-LABEL: {{^[}]}}
     let _s = SomeUniqueName;
diff --git a/src/test/compile-fail/asm-bad-clobber.rs b/src/test/compile-fail/asm-bad-clobber.rs
new file mode 100644 (file)
index 0000000..714343a
--- /dev/null
@@ -0,0 +1,26 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// ignore-android
+// ignore-arm
+// ignore-aarch64
+
+#![feature(asm, rustc_attrs)]
+
+#[cfg(any(target_arch = "x86",
+          target_arch = "x86_64"))]
+#[rustc_error]
+pub fn main() {
+    unsafe {
+        // clobber formatted as register input/output
+        asm!("xor %eax, %eax" : : : "{eax}");
+        //~^ ERROR clobber should not be surrounded by braces
+    }
+}
diff --git a/src/test/compile-fail/cfg_attr_path.rs b/src/test/compile-fail/cfg_attr_path.rs
new file mode 100644 (file)
index 0000000..502768c
--- /dev/null
@@ -0,0 +1,12 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#[cfg_attr(all(), path = "nonexistent_file.rs")] mod foo;
+//~^ ERROR nonexistent_file.rs
diff --git a/src/test/compile-fail/intrinsic-return-address.rs b/src/test/compile-fail/intrinsic-return-address.rs
deleted file mode 100644 (file)
index 9060568..0000000
+++ /dev/null
@@ -1,24 +0,0 @@
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-#![allow(warnings)]
-#![feature(intrinsics)]
-
-extern "rust-intrinsic" {
-    fn return_address() -> *const u8;
-}
-
-unsafe fn f() { let _ = return_address(); }
-//~^ ERROR invalid use of `return_address` intrinsic: function does not use out pointer
-
-unsafe fn g() -> isize { let _ = return_address(); 0 }
-//~^ ERROR invalid use of `return_address` intrinsic: function does not use out pointer
-
-fn main() {}
index 11d81eda55625960ae69fcca3483de8072e7de35..3f50811f826e07e83a37bd50d8b0fbfac40fc665 100644 (file)
@@ -72,6 +72,7 @@ fn expand_into_foo_multi(cx: &mut ExtCtxt,
                 ..(*quote_item!(cx, enum Foo2 { Bar2, Baz2 }).unwrap()).clone()
             })),
             Annotatable::Item(quote_item!(cx, enum Foo3 { Bar }).unwrap()),
+            Annotatable::Item(quote_item!(cx, #[cfg(any())] fn foo2() {}).unwrap()),
         ],
         Annotatable::ImplItem(it) => vec![
             quote_item!(cx, impl X { fn foo(&self) -> i32 { 42 } }).unwrap().and_then(|i| {
index d17adff007c6335b440c4c42063a753f99f2c91c..fe2317aabea68b43f60c53e0c1e81932b562fdf6 100644 (file)
@@ -21,6 +21,9 @@
 #[derive(PartialEq, Clone, Debug)]
 fn foo() -> AnotherFakeTypeThatHadBetterGoAway {}
 
+// Check that the `#[into_multi_foo]`-generated `foo2` is configured away
+fn foo2() {}
+
 trait Qux {
     #[into_multi_foo]
     fn bar();
diff --git a/src/test/run-pass/auxiliary/xcrate_generic_fn_nested_return.rs b/src/test/run-pass/auxiliary/xcrate_generic_fn_nested_return.rs
new file mode 100644 (file)
index 0000000..48fb05f
--- /dev/null
@@ -0,0 +1,26 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+pub struct Request {
+    pub id: String,
+    pub arg: String,
+}
+
+pub fn decode<T>() -> Result<Request, ()> {
+    (|| {
+        Ok(Request {
+            id: "hi".to_owned(),
+            arg: match Err(()) {
+                Ok(v) => v,
+                Err(e) => return Err(e)
+            },
+        })
+    })()
+}
diff --git a/src/test/run-pass/intrinsic-return-address.rs b/src/test/run-pass/intrinsic-return-address.rs
deleted file mode 100644 (file)
index 63aed3f..0000000
+++ /dev/null
@@ -1,43 +0,0 @@
-// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-
-#![feature(intrinsics)]
-
-use std::ptr;
-
-struct Point {
-    x: f32,
-    y: f32,
-    z: f32,
-}
-
-extern "rust-intrinsic" {
-    fn return_address() -> *const u8;
-}
-
-fn f(result: &mut usize) -> Point {
-    unsafe {
-        *result = return_address() as usize;
-        Point {
-            x: 1.0,
-            y: 2.0,
-            z: 3.0,
-        }
-    }
-
-}
-
-fn main() {
-    let mut intrinsic_reported_address = 0;
-    let pt = f(&mut intrinsic_reported_address);
-    let actual_address = &pt as *const Point as usize;
-    assert_eq!(intrinsic_reported_address, actual_address);
-}
diff --git a/src/test/run-pass/issue-27021.rs b/src/test/run-pass/issue-27021.rs
new file mode 100644 (file)
index 0000000..eb7d529
--- /dev/null
@@ -0,0 +1,21 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn main() {
+    let mut c = (1, (1, "".to_owned()));
+    match c {
+        c2 => { (c.1).0 = 2; assert_eq!((c2.1).0, 1); }
+    }
+
+    let mut c = (1, (1, (1, "".to_owned())));
+    match c.1 {
+        c2 => { ((c.1).1).0 = 3; assert_eq!((c2.1).0, 1); }
+    }
+}
index 2371909b31b779f96d46f57240526edc607775a5..ca3294a87adbb1a0ffbbe2c88d16d536ddf30620 100644 (file)
@@ -171,6 +171,13 @@ fn test_fn_ignored_pair_named() -> (Foo, Foo) {
     id(ignored_pair_named())
 }
 
+#[rustc_mir]
+fn test_fn_nested_pair(x: &((f32, f32), u32)) -> (f32, f32) {
+    let y = *x;
+    let z = y.0;
+    (z.0, z.1)
+}
+
 fn main() {
     assert_eq!(test1(1, (2, 3), &[4, 5, 6]), (1, (2, 3), &[4, 5, 6][..]));
     assert_eq!(test2(98), 98);
@@ -196,4 +203,5 @@ fn main() {
 
     assert_eq!(test_fn_ignored_pair_0(), ());
     assert_eq!(test_fn_ignored_pair_named(), (Foo, Foo));
+    assert_eq!(test_fn_nested_pair(&((1.0, 2.0), 0)), (1.0, 2.0));
 }
diff --git a/src/test/run-pass/xcrate_generic_fn_nested_return.rs b/src/test/run-pass/xcrate_generic_fn_nested_return.rs
new file mode 100644 (file)
index 0000000..181c916
--- /dev/null
@@ -0,0 +1,17 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// aux-build:xcrate_generic_fn_nested_return.rs
+
+extern crate xcrate_generic_fn_nested_return as test;
+
+pub fn main() {
+    assert!(test::decode::<()>().is_err());
+}
diff --git a/src/test/rustdoc/prim-title.rs b/src/test/rustdoc/prim-title.rs
new file mode 100644 (file)
index 0000000..79a3e7f
--- /dev/null
@@ -0,0 +1,17 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![crate_name = "foo"]
+
+// @has foo/primitive.u8.html '//head/title' 'u8 - Rust'
+// @!has - '//head/title' 'foo'
+#[doc(primitive = "u8")]
+/// `u8` docs
+mod u8 {}