git.lizzy.rs Git - rust.git/commitdiff
Move code into Step trait implementations.
author Mark Simulacrum <mark.simulacrum@gmail.com>
Wed, 5 Jul 2017 01:41:43 +0000 (19:41 -0600)
committer Mark Simulacrum <mark.simulacrum@gmail.com>
Thu, 20 Jul 2017 17:23:57 +0000 (11:23 -0600)
No changes are introduced to the code bodies. This commit will not build; it
is done to permit a better diff in later commits.
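As context for the diffs below: every Step implementation added here follows the same shape, a small serializable struct holding the old free function's arguments plus a run method that unpacks them and executes the old body. The Step trait and Builder type themselves live in the new src/bootstrap/builder.rs, which is not part of this diff; the Rust sketch below is only inferred from the impls that follow (each struct derives Serialize, declares `type Output`, and implements `fn run(self, builder: &Builder)`), so the real names, bounds, and signatures may differ.

    // Hypothetical sketch, inferred from the impls in this commit; the actual
    // definitions live in src/bootstrap/builder.rs and are not reproduced here.
    pub struct Builder<'a> {
        pub build: &'a Build, // shared build state, read as `builder.build` in each run()
        // ... plus whatever bookkeeping the builder keeps (kind, step cache, ...)
    }

    pub trait Step<'a>: serde::Serialize + Sized {
        /// What running the step yields; every impl in this commit uses `()`.
        type Output;

        /// Execute the step: unpack the struct's fields and `builder.build`,
        /// then run the body of the former free function unchanged.
        fn run(self, builder: &Builder<'a>) -> Self::Output;
    }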

src/bootstrap/check.rs
src/bootstrap/compile.rs
src/bootstrap/dist.rs
src/bootstrap/doc.rs
src/bootstrap/native.rs
src/bootstrap/tool.rs [new file with mode: 0644]

src/bootstrap/check.rs
index 5e553cf8d6fb8d5c5a226d715feb81bc071c1c21..0278b8d5dbf2b3bc9de4c576e94af3ae7385acfe 100644 (file)
 
 use build_helper::{self, output};
 
-use {Build, Compiler, Mode};
+use {Build, Mode};
 use dist;
-use util::{self, dylib_path, dylib_path_var, exe};
+use util::{self, dylib_path, dylib_path_var};
+
+use compile;
+use native;
+use builder::{Kind, Builder, Compiler, Step};
+use tool::Tool;
 
 const ADB_TEST_DIR: &str = "/data/tmp/work";
 
@@ -87,17 +92,30 @@ fn try_run_quiet(build: &Build, cmd: &mut Command) {
 //      .default(build.config.docs)
 //      .host(true)
 //      .run(move |s| check::linkcheck(build, s.target));
-/// Runs the `linkchecker` tool as compiled in `stage` by the `host` compiler.
-///
-/// This tool in `src/tools` will verify the validity of all our links in the
-/// documentation to ensure we don't have a bunch of dead ones.
-pub fn linkcheck(build: &Build, host: &str) {
-    println!("Linkcheck ({})", host);
-    let compiler = Compiler::new(0, host);
 
-    let _time = util::timeit();
-    try_run(build, build.tool_cmd(&compiler, "linkchecker")
-                        .arg(build.out.join(host).join("doc")));
+#[derive(Serialize)]
+pub struct Linkcheck<'a> {
+    host: &'a str,
+}
+
+impl<'a> Step<'a> for Linkcheck<'a> {
+    type Output = ();
+
+    /// Runs the `linkchecker` tool as compiled in `stage` by the `host` compiler.
+    ///
+    /// This tool in `src/tools` will verify the validity of all our links in the
+    /// documentation to ensure we don't have a bunch of dead ones.
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let host = self.host;
+
+        println!("Linkcheck ({})", host);
+        let compiler = Compiler::new(0, host);
+
+        let _time = util::timeit();
+        try_run(build, build.tool_cmd(&compiler, "linkchecker")
+                            .arg(build.out.join(host).join("doc")));
+    }
 }
 
 // rules.test("check-cargotest", "src/tools/cargotest")
@@ -105,65 +123,120 @@ pub fn linkcheck(build: &Build, host: &str) {
 //      .dep(|s| s.name("librustc"))
 //      .host(true)
 //      .run(move |s| check::cargotest(build, s.stage, s.target));
-/// Runs the `cargotest` tool as compiled in `stage` by the `host` compiler.
-///
-/// This tool in `src/tools` will check out a few Rust projects and run `cargo
-/// test` to ensure that we don't regress the test suites there.
-pub fn cargotest(build: &Build, stage: u32, host: &str) {
-    let compiler = Compiler::new(stage, host);
 
-    // Note that this is a short, cryptic, and not scoped directory name. This
-    // is currently to minimize the length of path on Windows where we otherwise
-    // quickly run into path name limit constraints.
-    let out_dir = build.out.join("ct");
-    t!(fs::create_dir_all(&out_dir));
+#[derive(Serialize)]
+pub struct Cargotest<'a> {
+    stage: u32,
+    host: &'a str,
+}
 
-    let _time = util::timeit();
-    let mut cmd = Command::new(build.tool(&Compiler::new(0, host), "cargotest"));
-    build.prepare_tool_cmd(&compiler, &mut cmd);
-    try_run(build, cmd.arg(&build.initial_cargo)
-                      .arg(&out_dir)
-                      .env("RUSTC", build.compiler_path(&compiler))
-                      .env("RUSTDOC", build.rustdoc(&compiler)));
+impl<'a> Step<'a> for Cargotest<'a> {
+    type Output = ();
+
+    /// Runs the `cargotest` tool as compiled in `stage` by the `host` compiler.
+    ///
+    /// This tool in `src/tools` will check out a few Rust projects and run `cargo
+    /// test` to ensure that we don't regress the test suites there.
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let stage = self.stage;
+        let host = self.host;
+        let compiler = Compiler::new(stage, host);
+
+        // Note that this is a short, cryptic, and not scoped directory name. This
+        // is currently to minimize the length of path on Windows where we otherwise
+        // quickly run into path name limit constraints.
+        let out_dir = build.out.join("ct");
+        t!(fs::create_dir_all(&out_dir));
+
+        let _time = util::timeit();
+        let mut cmd = Command::new(build.tool(&Compiler::new(0, host), "cargotest"));
+        build.prepare_tool_cmd(&compiler, &mut cmd);
+        try_run(build, cmd.arg(&build.initial_cargo)
+                          .arg(&out_dir)
+                          .env("RUSTC", build.compiler_path(&compiler))
+                          .env("RUSTDOC", build.rustdoc(&compiler)));
+    }
 }
 
 //rules.test("check-cargo", "cargo")
 //     .dep(|s| s.name("tool-cargo"))
 //     .host(true)
 //     .run(move |s| check::cargo(build, s.stage, s.target));
-/// Runs `cargo test` for `cargo` packaged with Rust.
-pub fn cargo(build: &Build, stage: u32, host: &str) {
-    let compiler = &Compiler::new(stage, host);
 
-    let mut cargo = build.cargo(compiler, Mode::Tool, host, "test");
-    cargo.arg("--manifest-path").arg(build.src.join("src/tools/cargo/Cargo.toml"));
-    if !build.fail_fast {
-        cargo.arg("--no-fail-fast");
-    }
+#[derive(Serialize)]
+pub struct Cargo<'a> {
+    stage: u32,
+    host: &'a str,
+}
+
+impl<'a> Step<'a> for Cargo<'a> {
+    type Output = ();
+
+    /// Runs `cargo test` for `cargo` packaged with Rust.
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let stage = self.stage;
+        let host = self.host;
+        let compiler = &Compiler::new(stage, host);
+
+        let mut cargo = build.cargo(compiler, Mode::Tool, host, "test");
+        cargo.arg("--manifest-path").arg(build.src.join("src/tools/cargo/Cargo.toml"));
+        if !build.fail_fast {
+            cargo.arg("--no-fail-fast");
+        }
 
-    // Don't build tests dynamically, just a pain to work with
-    cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1");
+        // Don't build tests dynamically, just a pain to work with
+        cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1");
 
-    // Don't run cross-compile tests, we may not have cross-compiled libstd libs
-    // available.
-    cargo.env("CFG_DISABLE_CROSS_TESTS", "1");
+        // Don't run cross-compile tests, we may not have cross-compiled libstd libs
+        // available.
+        cargo.env("CFG_DISABLE_CROSS_TESTS", "1");
 
-    try_run(build, cargo.env("PATH", &path_for_cargo(build, compiler)));
+        try_run(build, cargo.env("PATH", &path_for_cargo(build, compiler)));
+    }
 }
 
-/// Runs `cargo test` for the rls.
-pub fn rls(build: &Build, stage: u32, host: &str) {
-    let compiler = &Compiler::new(stage, host);
+#[derive(Serialize)]
+pub struct Rls<'a> {
+    stage: u32,
+    host: &'a str,
+}
 
-    let mut cargo = build.cargo(compiler, Mode::Tool, host, "test");
-    cargo.arg("--manifest-path").arg(build.src.join("src/tools/rls/Cargo.toml"));
+impl<'a> Step<'a> for Rls<'a> {
+    type Output = ();
 
-    // Don't build tests dynamically, just a pain to work with
-    cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1");
+    /// Runs `cargo test` for the rls.
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let stage = self.stage;
+        let host = self.host;
+        let compiler = &Compiler::new(stage, host);
 
-    build.add_rustc_lib_path(compiler, &mut cargo);
+        let mut cargo = build.cargo(compiler, Mode::Tool, host, "test");
+        cargo.arg("--manifest-path").arg(build.src.join("src/tools/rls/Cargo.toml"));
 
-    try_run(build, &mut cargo);
+        // Don't build tests dynamically, just a pain to work with
+        cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1");
+
+        build.add_rustc_lib_path(compiler, &mut cargo);
+
+        try_run(build, &mut cargo);
+    }
 }
 
 fn path_for_cargo(build: &Build, compiler: &Compiler) -> OsString {
@@ -173,6 +246,13 @@ fn path_for_cargo(build: &Build, compiler: &Compiler) -> OsString {
     let path = build.sysroot(compiler).join("bin");
     let old_path = env::var_os("PATH").unwrap_or_default();
     env::join_paths(iter::once(path).chain(env::split_paths(&old_path))).expect("")
 }
 
 //rules.test("check-tidy", "src/tools/tidy")
@@ -181,24 +261,37 @@ fn path_for_cargo(build: &Build, compiler: &Compiler) -> OsString {
 //     .host(true)
 //     .only_build(true)
 //     .run(move |s| check::tidy(build, s.target));
-/// Runs the `tidy` tool as compiled in `stage` by the `host` compiler.
-///
-/// This tool in `src/tools` checks up on various bits and pieces of style and
-/// otherwise just implements a few lint-like checks that are specific to the
-/// compiler itself.
-pub fn tidy(build: &Build, host: &str) {
-    let _folder = build.fold_output(|| "tidy");
-    println!("tidy check ({})", host);
-    let compiler = Compiler::new(0, host);
-    let mut cmd = build.tool_cmd(&compiler, "tidy");
-    cmd.arg(build.src.join("src"));
-    if !build.config.vendor {
-        cmd.arg("--no-vendor");
-    }
-    if build.config.quiet_tests {
-        cmd.arg("--quiet");
+
+#[derive(Serialize)]
+pub struct Tidy<'a> {
+    host: &'a str,
+}
+
+impl<'a> Step<'a> for Tidy<'a> {
+    type Output = ();
+
+    /// Runs the `tidy` tool as compiled in `stage` by the `host` compiler.
+    ///
+    /// This tool in `src/tools` checks up on various bits and pieces of style and
+    /// otherwise just implements a few lint-like checks that are specific to the
+    /// compiler itself.
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let host = self.host;
+
+        let _folder = build.fold_output(|| "tidy");
+        println!("tidy check ({})", host);
+        let compiler = Compiler::new(0, host);
+        let mut cmd = build.tool_cmd(&compiler, "tidy");
+        cmd.arg(build.src.join("src"));
+        if !build.config.vendor {
+            cmd.arg("--no-vendor");
+        }
+        if build.config.quiet_tests {
+            cmd.arg("--quiet");
+        }
+        try_run(build, &mut cmd);
     }
-    try_run(build, &mut cmd);
 }
 
 fn testdir(build: &Build, host: &str) -> PathBuf {
@@ -303,158 +396,171 @@ fn testdir(build: &Build, host: &str) -> PathBuf {
 //              "pretty", "run-fail-fulldeps");
 //    }
 
-/// Executes the `compiletest` tool to run a suite of tests.
-///
-/// Compiles all tests with `compiler` for `target` with the specified
-/// compiletest `mode` and `suite` arguments. For example `mode` can be
-/// "run-pass" or `suite` can be something like `debuginfo`.
-pub fn compiletest(build: &Build,
-                   compiler: &Compiler,
-                   target: &str,
-                   mode: &str,
-                   suite: &str) {
-    let _folder = build.fold_output(|| format!("test_{}", suite));
-    println!("Check compiletest suite={} mode={} ({} -> {})",
-             suite, mode, compiler.host, target);
-    let mut cmd = Command::new(build.tool(&Compiler::new(0, compiler.host),
-                                          "compiletest"));
-    build.prepare_tool_cmd(compiler, &mut cmd);
-
-    // compiletest currently has... a lot of arguments, so let's just pass all
-    // of them!
-
-    cmd.arg("--compile-lib-path").arg(build.rustc_libdir(compiler));
-    cmd.arg("--run-lib-path").arg(build.sysroot_libdir(compiler, target));
-    cmd.arg("--rustc-path").arg(build.compiler_path(compiler));
-    cmd.arg("--rustdoc-path").arg(build.rustdoc(compiler));
-    cmd.arg("--src-base").arg(build.src.join("src/test").join(suite));
-    cmd.arg("--build-base").arg(testdir(build, compiler.host).join(suite));
-    cmd.arg("--stage-id").arg(format!("stage{}-{}", compiler.stage, target));
-    cmd.arg("--mode").arg(mode);
-    cmd.arg("--target").arg(target);
-    cmd.arg("--host").arg(compiler.host);
-    cmd.arg("--llvm-filecheck").arg(build.llvm_filecheck(&build.build));
-
-    if let Some(ref nodejs) = build.config.nodejs {
-        cmd.arg("--nodejs").arg(nodejs);
-    }
-
-    let mut flags = vec!["-Crpath".to_string()];
-    if build.config.rust_optimize_tests {
-        flags.push("-O".to_string());
-    }
-    if build.config.rust_debuginfo_tests {
-        flags.push("-g".to_string());
-    }
-
-    let mut hostflags = build.rustc_flags(&compiler.host);
-    hostflags.extend(flags.clone());
-    cmd.arg("--host-rustcflags").arg(hostflags.join(" "));
+#[derive(Serialize)]
+pub struct Compiletest<'a> {
+    compiler: Compiler<'a>,
+    target: &'a str,
+    mode: &'a str,
+    suite: &'a str,
+}
 
-    let mut targetflags = build.rustc_flags(&target);
-    targetflags.extend(flags);
-    targetflags.push(format!("-Lnative={}",
-                             build.test_helpers_out(target).display()));
-    cmd.arg("--target-rustcflags").arg(targetflags.join(" "));
+impl<'a> Step<'a> for Compiletest<'a> {
+    type Output = ();
+
+    /// Executes the `compiletest` tool to run a suite of tests.
+    ///
+    /// Compiles all tests with `compiler` for `target` with the specified
+    /// compiletest `mode` and `suite` arguments. For example `mode` can be
+    /// "run-pass" or `suite` can be something like `debuginfo`.
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let compiler = self.compiler;
+        let target = self.target;
+        let mode = self.mode;
+        let suite = self.suite;
+        let _folder = build.fold_output(|| format!("test_{}", suite));
+        println!("Check compiletest suite={} mode={} ({} -> {})",
+                 suite, mode, compiler.host, target);
+        let mut cmd = Command::new(build.tool(&Compiler::new(0, compiler.host),
+                                              "compiletest"));
+        build.prepare_tool_cmd(compiler, &mut cmd);
+
+        // compiletest currently has... a lot of arguments, so let's just pass all
+        // of them!
+
+        cmd.arg("--compile-lib-path").arg(build.rustc_libdir(compiler));
+        cmd.arg("--run-lib-path").arg(build.sysroot_libdir(compiler, target));
+        cmd.arg("--rustc-path").arg(build.compiler_path(compiler));
+        cmd.arg("--rustdoc-path").arg(build.rustdoc(compiler));
+        cmd.arg("--src-base").arg(build.src.join("src/test").join(suite));
+        cmd.arg("--build-base").arg(testdir(build, compiler.host).join(suite));
+        cmd.arg("--stage-id").arg(format!("stage{}-{}", compiler.stage, target));
+        cmd.arg("--mode").arg(mode);
+        cmd.arg("--target").arg(target);
+        cmd.arg("--host").arg(compiler.host);
+        cmd.arg("--llvm-filecheck").arg(build.llvm_filecheck(&build.build));
+
+        if let Some(ref nodejs) = build.config.nodejs {
+            cmd.arg("--nodejs").arg(nodejs);
+        }
 
-    cmd.arg("--docck-python").arg(build.python());
+        let mut flags = vec!["-Crpath".to_string()];
+        if build.config.rust_optimize_tests {
+            flags.push("-O".to_string());
+        }
+        if build.config.rust_debuginfo_tests {
+            flags.push("-g".to_string());
+        }
 
-    if build.build.ends_with("apple-darwin") {
-        // Force /usr/bin/python on macOS for LLDB tests because we're loading the
-        // LLDB plugin's compiled module which only works with the system python
-        // (namely not Homebrew-installed python)
-        cmd.arg("--lldb-python").arg("/usr/bin/python");
-    } else {
-        cmd.arg("--lldb-python").arg(build.python());
-    }
+        let mut hostflags = build.rustc_flags(&compiler.host);
+        hostflags.extend(flags.clone());
+        cmd.arg("--host-rustcflags").arg(hostflags.join(" "));
+
+        let mut targetflags = build.rustc_flags(&target);
+        targetflags.extend(flags);
+        targetflags.push(format!("-Lnative={}",
+                                 build.test_helpers_out(target).display()));
+        cmd.arg("--target-rustcflags").arg(targetflags.join(" "));
+
+        cmd.arg("--docck-python").arg(build.python());
+
+        if build.build.ends_with("apple-darwin") {
+            // Force /usr/bin/python on macOS for LLDB tests because we're loading the
+            // LLDB plugin's compiled module which only works with the system python
+            // (namely not Homebrew-installed python)
+            cmd.arg("--lldb-python").arg("/usr/bin/python");
+        } else {
+            cmd.arg("--lldb-python").arg(build.python());
+        }
 
-    if let Some(ref gdb) = build.config.gdb {
-        cmd.arg("--gdb").arg(gdb);
-    }
-    if let Some(ref vers) = build.lldb_version {
-        cmd.arg("--lldb-version").arg(vers);
-    }
-    if let Some(ref dir) = build.lldb_python_dir {
-        cmd.arg("--lldb-python-dir").arg(dir);
-    }
-    let llvm_config = build.llvm_config(target);
-    let llvm_version = output(Command::new(&llvm_config).arg("--version"));
-    cmd.arg("--llvm-version").arg(llvm_version);
-    if !build.is_rust_llvm(target) {
-        cmd.arg("--system-llvm");
-    }
+        if let Some(ref gdb) = build.config.gdb {
+            cmd.arg("--gdb").arg(gdb);
+        }
+        if let Some(ref vers) = build.lldb_version {
+            cmd.arg("--lldb-version").arg(vers);
+        }
+        if let Some(ref dir) = build.lldb_python_dir {
+            cmd.arg("--lldb-python-dir").arg(dir);
+        }
+        let llvm_config = build.llvm_config(target);
+        let llvm_version = output(Command::new(&llvm_config).arg("--version"));
+        cmd.arg("--llvm-version").arg(llvm_version);
+        if !build.is_rust_llvm(target) {
+            cmd.arg("--system-llvm");
+        }
 
-    cmd.args(&build.flags.cmd.test_args());
+        cmd.args(&build.flags.cmd.test_args());
 
-    if build.is_verbose() {
-        cmd.arg("--verbose");
-    }
+        if build.is_verbose() {
+            cmd.arg("--verbose");
+        }
 
-    if build.config.quiet_tests {
-        cmd.arg("--quiet");
-    }
+        if build.config.quiet_tests {
+            cmd.arg("--quiet");
+        }
 
-    // Only pass correct values for these flags for the `run-make` suite as it
-    // requires that a C++ compiler was configured which isn't always the case.
-    if suite == "run-make" {
-        let llvm_components = output(Command::new(&llvm_config).arg("--components"));
-        let llvm_cxxflags = output(Command::new(&llvm_config).arg("--cxxflags"));
-        cmd.arg("--cc").arg(build.cc(target))
-           .arg("--cxx").arg(build.cxx(target).unwrap())
-           .arg("--cflags").arg(build.cflags(target).join(" "))
-           .arg("--llvm-components").arg(llvm_components.trim())
-           .arg("--llvm-cxxflags").arg(llvm_cxxflags.trim());
-    } else {
-        cmd.arg("--cc").arg("")
-           .arg("--cxx").arg("")
-           .arg("--cflags").arg("")
-           .arg("--llvm-components").arg("")
-           .arg("--llvm-cxxflags").arg("");
-    }
+        // Only pass correct values for these flags for the `run-make` suite as it
+        // requires that a C++ compiler was configured which isn't always the case.
+        if suite == "run-make" {
+            let llvm_components = output(Command::new(&llvm_config).arg("--components"));
+            let llvm_cxxflags = output(Command::new(&llvm_config).arg("--cxxflags"));
+            cmd.arg("--cc").arg(build.cc(target))
+               .arg("--cxx").arg(build.cxx(target).unwrap())
+               .arg("--cflags").arg(build.cflags(target).join(" "))
+               .arg("--llvm-components").arg(llvm_components.trim())
+               .arg("--llvm-cxxflags").arg(llvm_cxxflags.trim());
+        } else {
+            cmd.arg("--cc").arg("")
+               .arg("--cxx").arg("")
+               .arg("--cflags").arg("")
+               .arg("--llvm-components").arg("")
+               .arg("--llvm-cxxflags").arg("");
+        }
 
-    if build.remote_tested(target) {
-        cmd.arg("--remote-test-client")
-           .arg(build.tool(&Compiler::new(0, &build.build),
-                           "remote-test-client"));
-    }
+        if build.remote_tested(target) {
+            cmd.arg("--remote-test-client")
+               .arg(build.tool(&Compiler::new(0, &build.build),
+                               "remote-test-client"));
+        }
 
-    // Running a C compiler on MSVC requires a few env vars to be set, to be
-    // sure to set them here.
-    //
-    // Note that if we encounter `PATH` we make sure to append to our own `PATH`
-    // rather than stomp over it.
-    if target.contains("msvc") {
-        for &(ref k, ref v) in build.cc[target].0.env() {
-            if k != "PATH" {
-                cmd.env(k, v);
+        // Running a C compiler on MSVC requires a few env vars to be set, to be
+        // sure to set them here.
+        //
+        // Note that if we encounter `PATH` we make sure to append to our own `PATH`
+        // rather than stomp over it.
+        if target.contains("msvc") {
+            for &(ref k, ref v) in build.cc[target].0.env() {
+                if k != "PATH" {
+                    cmd.env(k, v);
+                }
             }
         }
-    }
-    cmd.env("RUSTC_BOOTSTRAP", "1");
-    build.add_rust_test_threads(&mut cmd);
+        cmd.env("RUSTC_BOOTSTRAP", "1");
+        build.add_rust_test_threads(&mut cmd);
 
-    if build.config.sanitizers {
-        cmd.env("SANITIZER_SUPPORT", "1");
-    }
+        if build.config.sanitizers {
+            cmd.env("SANITIZER_SUPPORT", "1");
+        }
 
-    if build.config.profiler {
-        cmd.env("PROFILER_SUPPORT", "1");
-    }
+        if build.config.profiler {
+            cmd.env("PROFILER_SUPPORT", "1");
+        }
 
-    cmd.arg("--adb-path").arg("adb");
-    cmd.arg("--adb-test-dir").arg(ADB_TEST_DIR);
-    if target.contains("android") {
-        // Assume that cc for this target comes from the android sysroot
-        cmd.arg("--android-cross-path")
-           .arg(build.cc(target).parent().unwrap().parent().unwrap());
-    } else {
-        cmd.arg("--android-cross-path").arg("");
-    }
+        cmd.arg("--adb-path").arg("adb");
+        cmd.arg("--adb-test-dir").arg(ADB_TEST_DIR);
+        if target.contains("android") {
+            // Assume that cc for this target comes from the android sysroot
+            cmd.arg("--android-cross-path")
+               .arg(build.cc(target).parent().unwrap().parent().unwrap());
+        } else {
+            cmd.arg("--android-cross-path").arg("");
+        }
 
-    build.ci_env.force_coloring_in_ci(&mut cmd);
+        build.ci_env.force_coloring_in_ci(&mut cmd);
 
-    let _time = util::timeit();
-    try_run(build, &mut cmd);
+        let _time = util::timeit();
+        try_run(build, &mut cmd);
+    }
 }
 
 // rules.test("check-docs", "src/doc")
 }
 
 // rules.test("check-docs", "src/doc")
@@ -500,28 +606,41 @@ pub fn docs(build: &Build, compiler: &Compiler) {
 //     .default(true)
 //     .host(true)
 //     .run(move |s| check::error_index(build, &s.compiler()));
 //     .default(true)
 //     .host(true)
 //     .run(move |s| check::error_index(build, &s.compiler()));
-/// Run the error index generator tool to execute the tests located in the error
-/// index.
-///
-/// The `error_index_generator` tool lives in `src/tools` and is used to
-/// generate a markdown file from the error indexes of the code base which is
-/// then passed to `rustdoc --test`.
-pub fn error_index(build: &Build, compiler: &Compiler) {
-    let _folder = build.fold_output(|| "test_error_index");
-    println!("Testing error-index stage{}", compiler.stage);
 
-    let dir = testdir(build, compiler.host);
-    t!(fs::create_dir_all(&dir));
-    let output = dir.join("error-index.md");
-
-    let _time = util::timeit();
-    build.run(build.tool_cmd(&Compiler::new(0, compiler.host),
-                             "error_index_generator")
-                   .arg("markdown")
-                   .arg(&output)
-                   .env("CFG_BUILD", &build.build));
+#[derive(Serialize)]
+pub struct ErrorIndex<'a> {
+    compiler: Compiler<'a>,
+}
 
-    markdown_test(build, compiler, &output);
+impl<'a> Step<'a> for ErrorIndex<'a> {
+    type Output = ();
+
+    /// Run the error index generator tool to execute the tests located in the error
+    /// index.
+    ///
+    /// The `error_index_generator` tool lives in `src/tools` and is used to
+    /// generate a markdown file from the error indexes of the code base which is
+    /// then passed to `rustdoc --test`.
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let compiler = self.compiler;
+
+        let _folder = build.fold_output(|| "test_error_index");
+        println!("Testing error-index stage{}", compiler.stage);
+
+        let dir = testdir(build, compiler.host);
+        t!(fs::create_dir_all(&dir));
+        let output = dir.join("error-index.md");
+
+        let _time = util::timeit();
+        build.run(build.tool_cmd(&Compiler::new(0, compiler.host),
+                                "error_index_generator")
+                    .arg("markdown")
+                    .arg(&output)
+                    .env("CFG_BUILD", &build.build));
+
+        markdown_test(build, compiler, &output);
+    }
 }
 
 fn markdown_test(build: &Build, compiler: &Compiler, markdown: &Path) {
 }
 
 fn markdown_test(build: &Build, compiler: &Compiler, markdown: &Path) {
@@ -612,120 +731,135 @@ fn markdown_test(build: &Build, compiler: &Compiler, markdown: &Path) {
 //         .run(move |s| check::krate(build, &s.compiler(), s.target,
 //                                    Mode::Librustc, TestKind::Test, None));
 
 //         .run(move |s| check::krate(build, &s.compiler(), s.target,
 //                                    Mode::Librustc, TestKind::Test, None));
 
-/// Run all unit tests plus documentation tests for an entire crate DAG defined
-/// by a `Cargo.toml`
-///
-/// This is what runs tests for crates like the standard library, compiler, etc.
-/// It essentially is the driver for running `cargo test`.
-///
-/// Currently this runs all tests for a DAG by passing a bunch of `-p foo`
-/// arguments, and those arguments are discovered from `cargo metadata`.
-pub fn krate(build: &Build,
-             compiler: &Compiler,
-             target: &str,
-             mode: Mode,
-             test_kind: TestKind,
-             krate: Option<&str>) {
-    let (name, path, features, root) = match mode {
-        Mode::Libstd => {
-            ("libstd", "src/libstd", build.std_features(), "std")
-        }
-        Mode::Libtest => {
-            ("libtest", "src/libtest", String::new(), "test")
-        }
-        Mode::Librustc => {
-            ("librustc", "src/rustc", build.rustc_features(), "rustc-main")
-        }
-        _ => panic!("can only test libraries"),
-    };
-    let _folder = build.fold_output(|| {
-        format!("{}_stage{}-{}", test_kind.subcommand(), compiler.stage, name)
-    });
-    println!("{} {} stage{} ({} -> {})", test_kind, name, compiler.stage,
-             compiler.host, target);
-
-    // If we're not doing a full bootstrap but we're testing a stage2 version of
-    // libstd, then what we're actually testing is the libstd produced in
-    // stage1. Reflect that here by updating the compiler that we're working
-    // with automatically.
-    let compiler = if build.force_use_stage1(compiler, target) {
-        Compiler::new(1, compiler.host)
-    } else {
-        compiler.clone()
-    };
-
-    // Build up the base `cargo test` command.
-    //
-    // Pass in some standard flags then iterate over the graph we've discovered
-    // in `cargo metadata` with the maps above and figure out what `-p`
-    // arguments need to get passed.
-    let mut cargo = build.cargo(&compiler, mode, target, test_kind.subcommand());
-    cargo.arg("--manifest-path")
-         .arg(build.src.join(path).join("Cargo.toml"))
-         .arg("--features").arg(features);
-    if test_kind.subcommand() == "test" && !build.fail_fast {
-        cargo.arg("--no-fail-fast");
-    }
+#[derive(Serialize)]
+pub struct Krate<'a> {
+    compiler: Compiler<'a>,
+    target: &'a str,
+    mode: Mode,
+    test_kind: TestKind,
+    krate: Option<&'a str>,
+}
 
-    match krate {
-        Some(krate) => {
-            cargo.arg("-p").arg(krate);
-        }
-        None => {
-            let mut visited = HashSet::new();
-            let mut next = vec![root];
-            while let Some(name) = next.pop() {
-                // Right now jemalloc is our only target-specific crate in the
-                // sense that it's not present on all platforms. Custom skip it
-                // here for now, but if we add more this probably wants to get
-                // more generalized.
-                //
-                // Also skip `build_helper` as it's not compiled normally for
-                // target during the bootstrap and it's just meant to be a
-                // helper crate, not tested. If it leaks through then it ends up
-                // messing with various mtime calculations and such.
-                if !name.contains("jemalloc") && name != "build_helper" {
-                    cargo.arg("-p").arg(&format!("{}:0.0.0", name));
-                }
-                for dep in build.crates[name].deps.iter() {
-                    if visited.insert(dep) {
-                        next.push(dep);
+impl<'a> Step<'a> for Krate<'a> {
+    type Output = ();
+
+    /// Run all unit tests plus documentation tests for an entire crate DAG defined
+    /// by a `Cargo.toml`
+    ///
+    /// This is what runs tests for crates like the standard library, compiler, etc.
+    /// It essentially is the driver for running `cargo test`.
+    ///
+    /// Currently this runs all tests for a DAG by passing a bunch of `-p foo`
+    /// arguments, and those arguments are discovered from `cargo metadata`.
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let compiler = self.compiler;
+        let target = self.target;
+        let mode = self.mode;
+        let test_kind = self.test_kind;
+        let krate = self.krate;
+
+        let (name, path, features, root) = match mode {
+            Mode::Libstd => {
+                ("libstd", "src/libstd", build.std_features(), "std")
+            }
+            Mode::Libtest => {
+                ("libtest", "src/libtest", String::new(), "test")
+            }
+            Mode::Librustc => {
+                ("librustc", "src/rustc", build.rustc_features(), "rustc-main")
+            }
+            _ => panic!("can only test libraries"),
+        };
+        let _folder = build.fold_output(|| {
+            format!("{}_stage{}-{}", test_kind.subcommand(), compiler.stage, name)
+        });
+        println!("{} {} stage{} ({} -> {})", test_kind, name, compiler.stage,
+                compiler.host, target);
+
+        // If we're not doing a full bootstrap but we're testing a stage2 version of
+        // libstd, then what we're actually testing is the libstd produced in
+        // stage1. Reflect that here by updating the compiler that we're working
+        // with automatically.
+        let compiler = if build.force_use_stage1(compiler, target) {
+            Compiler::new(1, compiler.host)
+        } else {
+            compiler.clone()
+        };
+
+        // Build up the base `cargo test` command.
+        //
+        // Pass in some standard flags then iterate over the graph we've discovered
+        // in `cargo metadata` with the maps above and figure out what `-p`
+        // arguments need to get passed.
+        let mut cargo = build.cargo(&compiler, mode, target, test_kind.subcommand());
+        cargo.arg("--manifest-path")
+            .arg(build.src.join(path).join("Cargo.toml"))
+            .arg("--features").arg(features);
+        if test_kind.subcommand() == "test" && !build.fail_fast {
+            cargo.arg("--no-fail-fast");
+        }
+
+        match krate {
+            Some(krate) => {
+                cargo.arg("-p").arg(krate);
+            }
+            None => {
+                let mut visited = HashSet::new();
+                let mut next = vec![root];
+                while let Some(name) = next.pop() {
+                    // Right now jemalloc is our only target-specific crate in the
+                    // sense that it's not present on all platforms. Custom skip it
+                    // here for now, but if we add more this probably wants to get
+                    // more generalized.
+                    //
+                    // Also skip `build_helper` as it's not compiled normally for
+                    // target during the bootstrap and it's just meant to be a
+                    // helper crate, not tested. If it leaks through then it ends up
+                    // messing with various mtime calculations and such.
+                    if !name.contains("jemalloc") && name != "build_helper" {
+                        cargo.arg("-p").arg(&format!("{}:0.0.0", name));
+                    }
+                    for dep in build.crates[name].deps.iter() {
+                        if visited.insert(dep) {
+                            next.push(dep);
+                        }
                     }
                 }
             }
         }
-    }
-
-    // The tests are going to run with the *target* libraries, so we need to
-    // ensure that those libraries show up in the LD_LIBRARY_PATH equivalent.
-    //
-    // Note that to run the compiler we need to run with the *host* libraries,
-    // but our wrapper scripts arrange for that to be the case anyway.
-    let mut dylib_path = dylib_path();
-    dylib_path.insert(0, build.sysroot_libdir(&compiler, target));
-    cargo.env(dylib_path_var(), env::join_paths(&dylib_path).unwrap());
-
-    if target.contains("emscripten") || build.remote_tested(target) {
-        cargo.arg("--no-run");
-    }
 
-    cargo.arg("--");
+        // The tests are going to run with the *target* libraries, so we need to
+        // ensure that those libraries show up in the LD_LIBRARY_PATH equivalent.
+        //
+        // Note that to run the compiler we need to run with the *host* libraries,
+        // but our wrapper scripts arrange for that to be the case anyway.
+        let mut dylib_path = dylib_path();
+        dylib_path.insert(0, build.sysroot_libdir(&compiler, target));
+        cargo.env(dylib_path_var(), env::join_paths(&dylib_path).unwrap());
+
+        if target.contains("emscripten") || build.remote_tested(target) {
+            cargo.arg("--no-run");
+        }
 
-    if build.config.quiet_tests {
-        cargo.arg("--quiet");
-    }
+        cargo.arg("--");
 
-    let _time = util::timeit();
+        if build.config.quiet_tests {
+            cargo.arg("--quiet");
+        }
 
-    if target.contains("emscripten") {
-        build.run(&mut cargo);
-        krate_emscripten(build, &compiler, target, mode);
-    } else if build.remote_tested(target) {
-        build.run(&mut cargo);
-        krate_remote(build, &compiler, target, mode);
-    } else {
-        cargo.args(&build.flags.cmd.test_args());
-        try_run(build, &mut cargo);
+        let _time = util::timeit();
+
+        if target.contains("emscripten") {
+            build.run(&mut cargo);
+            krate_emscripten(build, &compiler, target, mode);
+        } else if build.remote_tested(target) {
+            build.run(&mut cargo);
+            krate_remote(build, &compiler, target, mode);
+        } else {
+            cargo.args(&build.flags.cmd.test_args());
+            try_run(build, &mut cargo);
+        }
     }
 }
 
@@ -816,38 +950,52 @@ fn find_tests(dir: &Path, target: &str) -> Vec<PathBuf> {
 //         })
 //         .run(move |s| check::remote_copy_libs(build, &s.compiler(), s.target));
 //
-pub fn remote_copy_libs(build: &Build, compiler: &Compiler, target: &str) {
-    if !build.remote_tested(target) {
-        return
-    }
 
-    println!("REMOTE copy libs to emulator ({})", target);
-    t!(fs::create_dir_all(build.out.join("tmp")));
+#[derive(Serialize)]
+pub struct RemoteCopyLibs<'a> {
+    compiler: Compiler<'a>,
+    target: &'a str,
+}
 
-    let server = build.cargo_out(compiler, Mode::Tool, target)
-                      .join(exe("remote-test-server", target));
+impl<'a> Step<'a> for RemoteCopyLibs<'a> {
+    type Output = ();
 
-    // Spawn the emulator and wait for it to come online
-    let tool = build.tool(&Compiler::new(0, &build.build),
-                          "remote-test-client");
-    let mut cmd = Command::new(&tool);
-    cmd.arg("spawn-emulator")
-       .arg(target)
-       .arg(&server)
-       .arg(build.out.join("tmp"));
-    if let Some(rootfs) = build.qemu_rootfs(target) {
-        cmd.arg(rootfs);
-    }
-    build.run(&mut cmd);
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let compiler = self.compiler;
+        let target = self.target;
+        if !build.remote_tested(target) {
+            return
+        }
+
+        println!("REMOTE copy libs to emulator ({})", target);
+        t!(fs::create_dir_all(build.out.join("tmp")));
 
-    // Push all our dylibs to the emulator
-    for f in t!(build.sysroot_libdir(compiler, target).read_dir()) {
-        let f = t!(f);
-        let name = f.file_name().into_string().unwrap();
-        if util::is_dylib(&name) {
-            build.run(Command::new(&tool)
-                              .arg("push")
-                              .arg(f.path()));
+        let server = build.cargo_out(compiler, Mode::Tool, target)
+                          .join(exe("remote-test-server", target));
+
+        // Spawn the emulator and wait for it to come online
+        let tool = build.tool(&Compiler::new(0, &build.build),
+                              "remote-test-client");
+        let mut cmd = Command::new(&tool);
+        cmd.arg("spawn-emulator")
+           .arg(target)
+           .arg(&server)
+           .arg(build.out.join("tmp"));
+        if let Some(rootfs) = build.qemu_rootfs(target) {
+            cmd.arg(rootfs);
+        }
+        build.run(&mut cmd);
+
+        // Push all our dylibs to the emulator
+        for f in t!(build.sysroot_libdir(compiler, target).read_dir()) {
+            let f = t!(f);
+            let name = f.file_name().into_string().unwrap();
+            if util::is_dylib(&name) {
+                build.run(Command::new(&tool)
+                                  .arg("push")
+                                  .arg(f.path()));
+            }
         }
     }
 }
@@ -857,56 +1005,65 @@ pub fn remote_copy_libs(build: &Build, compiler: &Compiler, target: &str) {
 //     .dep(|s| s.name("dist-src"))
 //     .run(move |_| check::distcheck(build));
 
-/// Run "distcheck", a 'make check' from a tarball
-pub fn distcheck(build: &Build) {
-    if build.build != "x86_64-unknown-linux-gnu" {
-        return
-    }
-    if !build.config.host.iter().any(|s| s == "x86_64-unknown-linux-gnu") {
-        return
-    }
-    if !build.config.target.iter().any(|s| s == "x86_64-unknown-linux-gnu") {
-        return
-    }
+#[derive(Serialize)]
+pub struct Distcheck;
 
-    println!("Distcheck");
-    let dir = build.out.join("tmp").join("distcheck");
-    let _ = fs::remove_dir_all(&dir);
-    t!(fs::create_dir_all(&dir));
-
-    let mut cmd = Command::new("tar");
-    cmd.arg("-xzf")
-       .arg(dist::rust_src_location(build))
-       .arg("--strip-components=1")
-       .current_dir(&dir);
-    build.run(&mut cmd);
-    build.run(Command::new("./configure")
-                     .args(&build.config.configure_args)
-                     .arg("--enable-vendor")
-                     .current_dir(&dir));
-    build.run(Command::new(build_helper::make(&build.build))
-                     .arg("check")
-                     .current_dir(&dir));
-
-    // Now make sure that rust-src has all of libstd's dependencies
-    println!("Distcheck rust-src");
-    let dir = build.out.join("tmp").join("distcheck-src");
-    let _ = fs::remove_dir_all(&dir);
-    t!(fs::create_dir_all(&dir));
-
-    let mut cmd = Command::new("tar");
-    cmd.arg("-xzf")
-       .arg(dist::rust_src_installer(build))
-       .arg("--strip-components=1")
-       .current_dir(&dir);
-    build.run(&mut cmd);
-
-    let toml = dir.join("rust-src/lib/rustlib/src/rust/src/libstd/Cargo.toml");
-    build.run(Command::new(&build.initial_cargo)
-                     .arg("generate-lockfile")
-                     .arg("--manifest-path")
-                     .arg(&toml)
-                     .current_dir(&dir));
+impl<'a> Step<'a> for Distcheck {
+    type Output = ();
+
+    /// Run "distcheck", a 'make check' from a tarball
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+
+        if build.build != "x86_64-unknown-linux-gnu" {
+            return
+        }
+        if !build.config.host.iter().any(|s| s == "x86_64-unknown-linux-gnu") {
+            return
+        }
+        if !build.config.target.iter().any(|s| s == "x86_64-unknown-linux-gnu") {
+            return
+        }
+
+        println!("Distcheck");
+        let dir = build.out.join("tmp").join("distcheck");
+        let _ = fs::remove_dir_all(&dir);
+        t!(fs::create_dir_all(&dir));
+
+        let mut cmd = Command::new("tar");
+        cmd.arg("-xzf")
+           .arg(dist::rust_src_location(build))
+           .arg("--strip-components=1")
+           .current_dir(&dir);
+        build.run(&mut cmd);
+        build.run(Command::new("./configure")
+                         .args(&build.config.configure_args)
+                         .arg("--enable-vendor")
+                         .current_dir(&dir));
+        build.run(Command::new(build_helper::make(&build.build))
+                         .arg("check")
+                         .current_dir(&dir));
+
+        // Now make sure that rust-src has all of libstd's dependencies
+        println!("Distcheck rust-src");
+        let dir = build.out.join("tmp").join("distcheck-src");
+        let _ = fs::remove_dir_all(&dir);
+        t!(fs::create_dir_all(&dir));
+
+        let mut cmd = Command::new("tar");
+        cmd.arg("-xzf")
+           .arg(dist::rust_src_installer(build))
+           .arg("--strip-components=1")
+           .current_dir(&dir);
+        build.run(&mut cmd);
+
+        let toml = dir.join("rust-src/lib/rustlib/src/rust/src/libstd/Cargo.toml");
+        build.run(Command::new(&build.initial_cargo)
+                         .arg("generate-lockfile")
+                         .arg("--manifest-path")
+                         .arg(&toml)
+                         .current_dir(&dir));
+    }
 }
 
 //rules.test("check-bootstrap", "src/bootstrap")
@@ -914,18 +1071,26 @@ pub fn distcheck(build: &Build) {
 //     .host(true)
 //     .only_build(true)
 //     .run(move |_| check::bootstrap(build));
-//
-/// Test the build system itself
-pub fn bootstrap(build: &Build) {
-    let mut cmd = Command::new(&build.initial_cargo);
-    cmd.arg("test")
-       .current_dir(build.src.join("src/bootstrap"))
-       .env("CARGO_TARGET_DIR", build.out.join("bootstrap"))
-       .env("RUSTC_BOOTSTRAP", "1")
-       .env("RUSTC", &build.initial_rustc);
-    if !build.fail_fast {
-        cmd.arg("--no-fail-fast");
+
+#[derive(Serialize)]
+pub struct Bootstrap;
+
+impl<'a> Step<'a> for Bootstrap {
+    type Output = ();
+
+    /// Test the build system itself
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let mut cmd = Command::new(&build.initial_cargo);
+        cmd.arg("test")
+           .current_dir(build.src.join("src/bootstrap"))
+           .env("CARGO_TARGET_DIR", build.out.join("bootstrap"))
+           .env("RUSTC_BOOTSTRAP", "1")
+           .env("RUSTC", &build.initial_rustc);
+        if !build.fail_fast {
+            cmd.arg("--no-fail-fast");
+        }
+        cmd.arg("--").args(&build.flags.cmd.test_args());
+        try_run(build, &mut cmd);
     }
-    cmd.arg("--").args(&build.flags.cmd.test_args());
-    try_run(build, &mut cmd);
 }
src/bootstrap/compile.rs
index 4a972ebf8df9d9532f08abd3534863db01fd3ecd..cf1e11f7ac8205f943c48d2749f61a5756939130 100644 (file)
 use util::{exe, libdir, is_dylib, copy};
 use {Build, Compiler, Mode};
 
-//    for (krate, path, _default) in krates("std") {
-//        rules.build(&krate.build_step, path)
-//             .dep(|s| s.name("startup-objects"))
-//             .dep(move |s| s.name("rustc").host(&build.build).target(s.host))
-//             .run(move |s| compile::std(build, s.target, &s.compiler()));
-//    }
-//    for (krate, path, _default) in krates("test") {
-//        rules.build(&krate.build_step, path)
-//             .dep(|s| s.name("libstd-link"))
-//             .run(move |s| compile::test(build, s.target, &s.compiler()));
-//    }
-//    for (krate, path, _default) in krates("rustc-main") {
-//        rules.build(&krate.build_step, path)
-//             .dep(|s| s.name("libtest-link"))
-//             .dep(move |s| s.name("llvm").host(&build.build).stage(0))
-//             .dep(|s| s.name("may-run-build-script"))
-//             .run(move |s| compile::rustc(build, s.target, &s.compiler()));
-//    }
 //
 //    // Crates which have build scripts need to rely on this rule to ensure that
 //    // the necessary prerequisites for a build script are linked and located in
 //
         rule
 }
 
         rule
 }
 
-/// Build the standard library.
-///
-/// This will build the standard library for a particular stage of the build
-/// using the `compiler` targeting the `target` architecture. The artifacts
-/// created will also be linked into the sysroot directory.
-pub fn std(build: &Build, target: &str, compiler: &Compiler) {
-    let libdir = build.sysroot_libdir(compiler, target);
-    t!(fs::create_dir_all(&libdir));
-
-    let _folder = build.fold_output(|| format!("stage{}-std", compiler.stage));
-    println!("Building stage{} std artifacts ({} -> {})", compiler.stage,
-             compiler.host, target);
-
-    let out_dir = build.cargo_out(compiler, Mode::Libstd, target);
-    build.clear_if_dirty(&out_dir, &build.compiler_path(compiler));
-    let mut cargo = build.cargo(compiler, Mode::Libstd, target, "build");
-    let mut features = build.std_features();
-
-    if let Some(target) = env::var_os("MACOSX_STD_DEPLOYMENT_TARGET") {
-        cargo.env("MACOSX_DEPLOYMENT_TARGET", target);
-    }
+//    for (krate, path, _default) in krates("std") {
+//        rules.build(&krate.build_step, path)
+//             .dep(|s| s.name("startup-objects"))
+//             .dep(move |s| s.name("rustc").host(&build.build).target(s.host))
+//             .run(move |s| compile::std(build, s.target, &s.compiler()));
+//    }
+#[derive(Serialize)]
+pub struct Std<'a> {
+    pub target: &'a str,
+    pub compiler: &'a Compiler<'a>,
+}
 
-    // When doing a local rebuild we tell cargo that we're stage1 rather than
-    // stage0. This works fine if the local rust and being-built rust have the
-    // same view of what the default allocator is, but fails otherwise. Since
-    // we don't have a way to express an allocator preference yet, work
-    // around the issue in the case of a local rebuild with jemalloc disabled.
-    if compiler.stage == 0 && build.local_rebuild && !build.config.use_jemalloc {
-        features.push_str(" force_alloc_system");
-    }
+impl<'a> Step<'a> for Std<'a> {
+    type Output = ();
+
+    /// Build the standard library.
+    ///
+    /// This will build the standard library for a particular stage of the build
+    /// using the `compiler` targeting the `target` architecture. The artifacts
+    /// created will also be linked into the sysroot directory.
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let target = self.target;
+        let compiler = self.compiler;
+        let libdir = build.sysroot_libdir(compiler, target);
+        t!(fs::create_dir_all(&libdir));
+
+        let _folder = build.fold_output(|| format!("stage{}-std", compiler.stage));
+        println!("Building stage{} std artifacts ({} -> {})", compiler.stage,
+                compiler.host, target);
+
+        let out_dir = build.cargo_out(compiler, Mode::Libstd, target);
+        build.clear_if_dirty(&out_dir, &build.compiler_path(compiler));
+        let mut cargo = build.cargo(compiler, Mode::Libstd, target, "build");
+        let mut features = build.std_features();
+
+        if let Some(target) = env::var_os("MACOSX_STD_DEPLOYMENT_TARGET") {
+            cargo.env("MACOSX_DEPLOYMENT_TARGET", target);
+        }
 
-    if compiler.stage != 0 && build.config.sanitizers {
-        // This variable is used by the sanitizer runtime crates, e.g.
-        // rustc_lsan, to build the sanitizer runtime from C code
-        // When this variable is missing, those crates won't compile the C code,
-        // so we don't set this variable during stage0 where llvm-config is
-        // missing
-        // We also only build the runtimes when --enable-sanitizers (or its
-        // config.toml equivalent) is used
-        cargo.env("LLVM_CONFIG", build.llvm_config(target));
-    }
-    cargo.arg("--features").arg(features)
-         .arg("--manifest-path")
-         .arg(build.src.join("src/libstd/Cargo.toml"));
+        // When doing a local rebuild we tell cargo that we're stage1 rather than
+        // stage0. This works fine if the local rust and being-built rust have the
+        // same view of what the default allocator is, but fails otherwise. Since
+        // we don't have a way to express an allocator preference yet, work
+        // around the issue in the case of a local rebuild with jemalloc disabled.
+        if compiler.stage == 0 && build.local_rebuild && !build.config.use_jemalloc {
+            features.push_str(" force_alloc_system");
+        }
 
-    if let Some(target) = build.config.target_config.get(target) {
-        if let Some(ref jemalloc) = target.jemalloc {
-            cargo.env("JEMALLOC_OVERRIDE", jemalloc);
+        if compiler.stage != 0 && build.config.sanitizers {
+            // This variable is used by the sanitizer runtime crates, e.g.
+            // rustc_lsan, to build the sanitizer runtime from C code
+            // When this variable is missing, those crates won't compile the C code,
+            // so we don't set this variable during stage0 where llvm-config is
+            // missing
+            // We also only build the runtimes when --enable-sanitizers (or its
+            // config.toml equivalent) is used
+            cargo.env("LLVM_CONFIG", build.llvm_config(target));
         }
-    }
-    if target.contains("musl") {
-        if let Some(p) = build.musl_root(target) {
-            cargo.env("MUSL_ROOT", p);
+        cargo.arg("--features").arg(features)
+            .arg("--manifest-path")
+            .arg(build.src.join("src/libstd/Cargo.toml"));
+
+        if let Some(target) = build.config.target_config.get(target) {
+            if let Some(ref jemalloc) = target.jemalloc {
+                cargo.env("JEMALLOC_OVERRIDE", jemalloc);
+            }
+        }
+        if target.contains("musl") {
+            if let Some(p) = build.musl_root(target) {
+                cargo.env("MUSL_ROOT", p);
+            }
         }
-    }
 
-    run_cargo(build,
-              &mut cargo,
-              &libstd_stamp(build, &compiler, target));
+        run_cargo(build,
+                &mut cargo,
+                &libstd_stamp(build, &compiler, target));
+    }
 }
 
 
@@ -216,36 +217,49 @@ pub fn std(build: &Build, target: &str, compiler: &Compiler) {
 //            compile::std_link)
 //     .dep(|s| s.name("startup-objects"))
 //     .dep(|s| s.name("create-sysroot").target(s.host));
-/// Link all libstd rlibs/dylibs into the sysroot location.
-///
-/// Links those artifacts generated by `compiler` to a the `stage` compiler's
-/// sysroot for the specified `host` and `target`.
-///
-/// Note that this assumes that `compiler` has already generated the libstd
-/// libraries for `target`, and this method will find them in the relevant
-/// output directory.
-pub fn std_link(build: &Build,
-                compiler: &Compiler,
-                target_compiler: &Compiler,
-                target: &str) {
-    println!("Copying stage{} std from stage{} ({} -> {} / {})",
-             target_compiler.stage,
-             compiler.stage,
-             compiler.host,
-             target_compiler.host,
-             target);
-    let libdir = build.sysroot_libdir(target_compiler, target);
-    add_to_sysroot(&libdir, &libstd_stamp(build, compiler, target));
-
-    if target.contains("musl") && !target.contains("mips") {
-        copy_musl_third_party_objects(build, target, &libdir);
-    }
 
 
-        // The sanitizers are only built in stage1 or above, so the dylibs will
-        // be missing in stage0 and causes panic. See the `std()` function above
-        // for reason why the sanitizers are not built in stage0.
-        copy_apple_sanitizer_dylibs(&build.native_dir(target), "osx", &libdir);
+#[derive(Serialize)]
+pub struct StdLink<'a> {
+    pub compiler: Compiler<'a>,
+    pub target_compiler: Compiler<'a>,
+    pub target: &'a str,
+}
+
+impl<'a> Step<'a> for StdLink<'a> {
+    type Output = ();
+
+    /// Link all libstd rlibs/dylibs into the sysroot location.
+    ///
+    /// Links those artifacts generated by `compiler` to the `stage` compiler's
+    /// sysroot for the specified `host` and `target`.
+    ///
+    /// Note that this assumes that `compiler` has already generated the libstd
+    /// libraries for `target`, and this method will find them in the relevant
+    /// output directory.
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let compiler = self.compiler;
+        let target_compiler = self.target_compiler;
+        let target = self.target;
+        println!("Copying stage{} std from stage{} ({} -> {} / {})",
+                target_compiler.stage,
+                compiler.stage,
+                compiler.host,
+                target_compiler.host,
+                target);
+        let libdir = build.sysroot_libdir(target_compiler, target);
+        add_to_sysroot(&libdir, &libstd_stamp(build, compiler, target));
+
+        if target.contains("musl") && !target.contains("mips") {
+            copy_musl_third_party_objects(build, target, &libdir);
+        }
+
+        if build.config.sanitizers && compiler.stage != 0 && target == "x86_64-apple-darwin" {
+            // The sanitizers are only built in stage1 or above, so the dylibs would
+            // be missing in stage0 and cause a panic. See the `std()` function above
+            // for the reason why the sanitizers are not built in stage0.
+            copy_apple_sanitizer_dylibs(&build.native_dir(target), "osx", &libdir);
+        }
     }
 }
 
@@ -274,66 +288,97 @@ fn copy_apple_sanitizer_dylibs(native_dir: &Path, platform: &str, into: &Path) {
 //      .dep(|s| s.name("create-sysroot").target(s.host))
 //      .run(move |s| compile::build_startup_objects(build, &s.compiler(), s.target));
 
 //      .dep(|s| s.name("create-sysroot").target(s.host))
 //      .run(move |s| compile::build_startup_objects(build, &s.compiler(), s.target));
 
-/// Build and prepare startup objects like rsbegin.o and rsend.o
-///
-/// These are primarily used on Windows right now for linking executables/dlls.
-/// They don't require any library support as they're just plain old object
-/// files, so we just use the nightly snapshot compiler to always build them (as
-/// no other compilers are guaranteed to be available).
-pub fn build_startup_objects(build: &Build, for_compiler: &Compiler, target: &str) {
-    if !target.contains("pc-windows-gnu") {
-        return
-    }
+#[derive(Serialize)]
+pub struct StartupObjects<'a> {
+    pub for_compiler: Compiler<'a>,
+    pub target: &'a str,
+}
 
 
-    let compiler = Compiler::new(0, &build.build);
-    let compiler_path = build.compiler_path(&compiler);
-    let src_dir = &build.src.join("src/rtstartup");
-    let dst_dir = &build.native_dir(target).join("rtstartup");
-    let sysroot_dir = &build.sysroot_libdir(for_compiler, target);
-    t!(fs::create_dir_all(dst_dir));
-    t!(fs::create_dir_all(sysroot_dir));
-
-    for file in &["rsbegin", "rsend"] {
-        let src_file = &src_dir.join(file.to_string() + ".rs");
-        let dst_file = &dst_dir.join(file.to_string() + ".o");
-        if !up_to_date(src_file, dst_file) {
-            let mut cmd = Command::new(&compiler_path);
-            build.run(cmd.env("RUSTC_BOOTSTRAP", "1")
-                        .arg("--cfg").arg(format!("stage{}", compiler.stage))
-                        .arg("--target").arg(target)
-                        .arg("--emit=obj")
-                        .arg("--out-dir").arg(dst_dir)
-                        .arg(src_file));
+impl<'a> Step<'a> for StartupObjects<'a> {
+    type Output = ();
+
+    /// Build and prepare startup objects like rsbegin.o and rsend.o
+    ///
+    /// These are primarily used on Windows right now for linking executables/dlls.
+    /// They don't require any library support as they're just plain old object
+    /// files, so we just use the nightly snapshot compiler to always build them (as
+    /// no other compilers are guaranteed to be available).
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let for_compiler = self.for_compiler;
+        let target = self.target;
+        if !target.contains("pc-windows-gnu") {
+            return
         }
 
-        copy(dst_file, &sysroot_dir.join(file.to_string() + ".o"));
-    }
+        let compiler = Compiler::new(0, &build.build);
+        let compiler_path = build.compiler_path(&compiler);
+        let src_dir = &build.src.join("src/rtstartup");
+        let dst_dir = &build.native_dir(target).join("rtstartup");
+        let sysroot_dir = &build.sysroot_libdir(for_compiler, target);
+        t!(fs::create_dir_all(dst_dir));
+        t!(fs::create_dir_all(sysroot_dir));
+
+        for file in &["rsbegin", "rsend"] {
+            let src_file = &src_dir.join(file.to_string() + ".rs");
+            let dst_file = &dst_dir.join(file.to_string() + ".o");
+            if !up_to_date(src_file, dst_file) {
+                let mut cmd = Command::new(&compiler_path);
+                build.run(cmd.env("RUSTC_BOOTSTRAP", "1")
+                            .arg("--cfg").arg(format!("stage{}", compiler.stage))
+                            .arg("--target").arg(target)
+                            .arg("--emit=obj")
+                            .arg("--out-dir").arg(dst_dir)
+                            .arg(src_file));
+            }
 
 
-    for obj in ["crt2.o", "dllcrt2.o"].iter() {
-        copy(&compiler_file(build.cc(target), obj), &sysroot_dir.join(obj));
+            copy(dst_file, &sysroot_dir.join(file.to_string() + ".o"));
+        }
+
+        for obj in ["crt2.o", "dllcrt2.o"].iter() {
+            copy(&compiler_file(build.cc(target), obj), &sysroot_dir.join(obj));
+        }
     }
 }
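The rebuild guard above (`up_to_date(src_file, dst_file)`) comes from the build_helper crate. As a rough, hedged sketch of what such a freshness check can look like when it only compares modification times (the real helper also walks directories and treats missing inputs more carefully):

use std::fs;
use std::path::Path;

// Simplified stand-in for a "dst is at least as new as src" check.
// Any I/O error is treated as stale so the object file gets rebuilt.
fn up_to_date_sketch(src: &Path, dst: &Path) -> bool {
    let src_mtime = match fs::metadata(src).and_then(|m| m.modified()) {
        Ok(t) => t,
        Err(_) => return false,
    };
    let dst_mtime = match fs::metadata(dst).and_then(|m| m.modified()) {
        Ok(t) => t,
        Err(_) => return false,
    };
    dst_mtime >= src_mtime
}

fn main() {
    let rebuild = !up_to_date_sketch(Path::new("rsbegin.rs"), Path::new("rsbegin.o"));
    println!("rebuild rsbegin.o: {}", rebuild);
}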
 
-/// Build libtest.
-///
-/// This will build libtest and supporting libraries for a particular stage of
-/// the build using the `compiler` targeting the `target` architecture. The
-/// artifacts created will also be linked into the sysroot directory.
-pub fn test(build: &Build, target: &str, compiler: &Compiler) {
-    let _folder = build.fold_output(|| format!("stage{}-test", compiler.stage));
-    println!("Building stage{} test artifacts ({} -> {})", compiler.stage,
-             compiler.host, target);
-    let out_dir = build.cargo_out(compiler, Mode::Libtest, target);
-    build.clear_if_dirty(&out_dir, &libstd_stamp(build, compiler, target));
-    let mut cargo = build.cargo(compiler, Mode::Libtest, target, "build");
-    if let Some(target) = env::var_os("MACOSX_STD_DEPLOYMENT_TARGET") {
-        cargo.env("MACOSX_DEPLOYMENT_TARGET", target);
+//    for (krate, path, _default) in krates("test") {
+//        rules.build(&krate.build_step, path)
+//             .dep(|s| s.name("libstd-link"))
+//             .run(move |s| compile::test(build, s.target, &s.compiler()));
+//    }
+#[derive(Serialize)]
+pub struct Test<'a> {
+    pub compiler: Compiler<'a>,
+    pub target: &'a str,
+}
+
+impl<'a> Step<'a> for Test<'a> {
+    type Output = ();
+
+    /// Build libtest.
+    ///
+    /// This will build libtest and supporting libraries for a particular stage of
+    /// the build using the `compiler` targeting the `target` architecture. The
+    /// artifacts created will also be linked into the sysroot directory.
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let target = self.target;
+        let compiler = self.compiler;
+        let _folder = build.fold_output(|| format!("stage{}-test", compiler.stage));
+        println!("Building stage{} test artifacts ({} -> {})", compiler.stage,
+                compiler.host, target);
+        let out_dir = build.cargo_out(compiler, Mode::Libtest, target);
+        build.clear_if_dirty(&out_dir, &libstd_stamp(build, compiler, target));
+        let mut cargo = build.cargo(compiler, Mode::Libtest, target, "build");
+        if let Some(target) = env::var_os("MACOSX_STD_DEPLOYMENT_TARGET") {
+            cargo.env("MACOSX_DEPLOYMENT_TARGET", target);
+        }
+        cargo.arg("--manifest-path")
+            .arg(build.src.join("src/libtest/Cargo.toml"));
+        run_cargo(build,
+                &mut cargo,
+                &libtest_stamp(build, compiler, target));
     }
-    cargo.arg("--manifest-path")
-         .arg(build.src.join("src/libtest/Cargo.toml"));
-    run_cargo(build,
-              &mut cargo,
-              &libtest_stamp(build, compiler, target));
 }
 
 
@@ -344,98 +389,131 @@ pub fn test(build: &Build, target: &str, compiler: &Compiler) {
 //            compile::test_link)
 //     .dep(|s| s.name("libstd-link"));
 
-/// Same as `std_link`, only for libtest
-pub fn test_link(build: &Build,
-                 compiler: &Compiler,
-                 target_compiler: &Compiler,
-                 target: &str) {
-    println!("Copying stage{} test from stage{} ({} -> {} / {})",
-             target_compiler.stage,
-             compiler.stage,
-             compiler.host,
-             target_compiler.host,
-             target);
-    add_to_sysroot(&build.sysroot_libdir(target_compiler, target),
-                   &libtest_stamp(build, compiler, target));
+#[derive(Serialize)]
+pub struct TestLink<'a> {
+    pub compiler: Compiler<'a>,
+    pub target_compiler: Compiler<'a>,
+    pub target: &'a str,
 }
 
-/// Build the compiler.
-///
-/// This will build the compiler for a particular stage of the build using
-/// the `compiler` targeting the `target` architecture. The artifacts
-/// created will also be linked into the sysroot directory.
-pub fn rustc(build: &Build, target: &str, compiler: &Compiler) {
-    let _folder = build.fold_output(|| format!("stage{}-rustc", compiler.stage));
-    println!("Building stage{} compiler artifacts ({} -> {})",
-             compiler.stage, compiler.host, target);
-
-    let out_dir = build.cargo_out(compiler, Mode::Librustc, target);
-    build.clear_if_dirty(&out_dir, &libtest_stamp(build, compiler, target));
-
-    let mut cargo = build.cargo(compiler, Mode::Librustc, target, "build");
-    cargo.arg("--features").arg(build.rustc_features())
-         .arg("--manifest-path")
-         .arg(build.src.join("src/rustc/Cargo.toml"));
-
-    // Set some configuration variables picked up by build scripts and
-    // the compiler alike
-    cargo.env("CFG_RELEASE", build.rust_release())
-         .env("CFG_RELEASE_CHANNEL", &build.config.channel)
-         .env("CFG_VERSION", build.rust_version())
-         .env("CFG_PREFIX", build.config.prefix.clone().unwrap_or_default());
-
-    if compiler.stage == 0 {
-        cargo.env("CFG_LIBDIR_RELATIVE", "lib");
-    } else {
-        let libdir_relative = build.config.libdir_relative.clone().unwrap_or(PathBuf::from("lib"));
-        cargo.env("CFG_LIBDIR_RELATIVE", libdir_relative);
+impl<'a> Step<'a> for TestLink<'a> {
+    type Output = ();
+
+    /// Same as `std_link`, only for libtest
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let compiler = self.compiler;
+        let target_compiler = self.target_compiler;
+        let target = self.target;
+        println!("Copying stage{} test from stage{} ({} -> {} / {})",
+                target_compiler.stage,
+                compiler.stage,
+                compiler.host,
+                target_compiler.host,
+                target);
+        add_to_sysroot(&build.sysroot_libdir(target_compiler, target),
+                    &libtest_stamp(build, compiler, target));
     }
+}
 
 
-    // If we're not building a compiler with debugging information then remove
-    // these two env vars which would be set otherwise.
-    if build.config.rust_debuginfo_only_std {
-        cargo.env_remove("RUSTC_DEBUGINFO");
-        cargo.env_remove("RUSTC_DEBUGINFO_LINES");
-    }
+//    for (krate, path, _default) in krates("rustc-main") {
+//        rules.build(&krate.build_step, path)
+//             .dep(|s| s.name("libtest-link"))
+//             .dep(move |s| s.name("llvm").host(&build.build).stage(0))
+//             .dep(|s| s.name("may-run-build-script"))
+//             .run(move |s| compile::rustc(build, s.target, &s.compiler()));
+//    }
 
 
-    if let Some(ref ver_date) = build.rust_info.commit_date() {
-        cargo.env("CFG_VER_DATE", ver_date);
-    }
-    if let Some(ref ver_hash) = build.rust_info.sha() {
-        cargo.env("CFG_VER_HASH", ver_hash);
-    }
-    if !build.unstable_features() {
-        cargo.env("CFG_DISABLE_UNSTABLE_FEATURES", "1");
-    }
-    // Flag that rust llvm is in use
-    if build.is_rust_llvm(target) {
-        cargo.env("LLVM_RUSTLLVM", "1");
-    }
-    cargo.env("LLVM_CONFIG", build.llvm_config(target));
-    let target_config = build.config.target_config.get(target);
-    if let Some(s) = target_config.and_then(|c| c.llvm_config.as_ref()) {
-        cargo.env("CFG_LLVM_ROOT", s);
-    }
-    // Building with a static libstdc++ is only supported on linux right now,
-    // not for MSVC or macOS
-    if build.config.llvm_static_stdcpp &&
-       !target.contains("windows") &&
-       !target.contains("apple") {
-        cargo.env("LLVM_STATIC_STDCPP",
-                  compiler_file(build.cxx(target).unwrap(), "libstdc++.a"));
-    }
-    if build.config.llvm_link_shared {
-        cargo.env("LLVM_LINK_SHARED", "1");
-    }
-    if let Some(ref s) = build.config.rustc_default_linker {
-        cargo.env("CFG_DEFAULT_LINKER", s);
-    }
-    if let Some(ref s) = build.config.rustc_default_ar {
-        cargo.env("CFG_DEFAULT_AR", s);
+#[derive(Serialize)]
+pub struct Rustc<'a> {
+    pub compiler: Compiler<'a>,
+    pub target: &'a str,
+}
+
+impl<'a> Step<'a> for Rustc<'a> {
+    type Output = ();
+
+    /// Build the compiler.
+    ///
+    /// This will build the compiler for a particular stage of the build using
+    /// the `compiler` targeting the `target` architecture. The artifacts
+    /// created will also be linked into the sysroot directory.
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let compiler = self.compiler;
+        let target = self.target;
+        let _folder = build.fold_output(|| format!("stage{}-rustc", compiler.stage));
+        println!("Building stage{} compiler artifacts ({} -> {})",
+                 compiler.stage, compiler.host, target);
+
+        let out_dir = build.cargo_out(compiler, Mode::Librustc, target);
+        build.clear_if_dirty(&out_dir, &libtest_stamp(build, compiler, target));
+
+        let mut cargo = build.cargo(compiler, Mode::Librustc, target, "build");
+        cargo.arg("--features").arg(build.rustc_features())
+             .arg("--manifest-path")
+             .arg(build.src.join("src/rustc/Cargo.toml"));
+
+        // Set some configuration variables picked up by build scripts and
+        // the compiler alike
+        cargo.env("CFG_RELEASE", build.rust_release())
+             .env("CFG_RELEASE_CHANNEL", &build.config.channel)
+             .env("CFG_VERSION", build.rust_version())
+             .env("CFG_PREFIX", build.config.prefix.clone().unwrap_or_default());
+
+        if compiler.stage == 0 {
+            cargo.env("CFG_LIBDIR_RELATIVE", "lib");
+        } else {
+            let libdir_relative = build.config.libdir_relative.clone().unwrap_or(PathBuf::from("lib"));
+            cargo.env("CFG_LIBDIR_RELATIVE", libdir_relative);
+        }
+
+        // If we're not building a compiler with debugging information then remove
+        // these two env vars which would be set otherwise.
+        if build.config.rust_debuginfo_only_std {
+            cargo.env_remove("RUSTC_DEBUGINFO");
+            cargo.env_remove("RUSTC_DEBUGINFO_LINES");
+        }
+
+        if let Some(ref ver_date) = build.rust_info.commit_date() {
+            cargo.env("CFG_VER_DATE", ver_date);
+        }
+        if let Some(ref ver_hash) = build.rust_info.sha() {
+            cargo.env("CFG_VER_HASH", ver_hash);
+        }
+        if !build.unstable_features() {
+            cargo.env("CFG_DISABLE_UNSTABLE_FEATURES", "1");
+        }
+        // Flag that rust llvm is in use
+        if build.is_rust_llvm(target) {
+            cargo.env("LLVM_RUSTLLVM", "1");
+        }
+        cargo.env("LLVM_CONFIG", build.llvm_config(target));
+        let target_config = build.config.target_config.get(target);
+        if let Some(s) = target_config.and_then(|c| c.llvm_config.as_ref()) {
+            cargo.env("CFG_LLVM_ROOT", s);
+        }
+        // Building with a static libstdc++ is only supported on linux right now,
+        // not for MSVC or macOS
+        if build.config.llvm_static_stdcpp &&
+           !target.contains("windows") &&
+           !target.contains("apple") {
+            cargo.env("LLVM_STATIC_STDCPP",
+                      compiler_file(build.cxx(target).unwrap(), "libstdc++.a"));
+        }
+        if build.config.llvm_link_shared {
+            cargo.env("LLVM_LINK_SHARED", "1");
+        }
+        if let Some(ref s) = build.config.rustc_default_linker {
+            cargo.env("CFG_DEFAULT_LINKER", s);
+        }
+        if let Some(ref s) = build.config.rustc_default_ar {
+            cargo.env("CFG_DEFAULT_AR", s);
+        }
+        run_cargo(build,
+                  &mut cargo,
+                  &librustc_stamp(build, compiler, target));
     }
-    run_cargo(build,
-              &mut cargo,
-              &librustc_stamp(build, compiler, target));
 }
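The CFG_* variables exported above are read back at compile time elsewhere in the tree. The snippet below only illustrates that consuming side, using option_env! with placeholder fallbacks rather than rustc's actual version-string code:

// Illustration: reading back the kind of CFG_* values the step above exports.
// The fallback strings are placeholders, not real rustc defaults.
fn version_string() -> String {
    let release = option_env!("CFG_RELEASE").unwrap_or("unknown");
    let channel = option_env!("CFG_RELEASE_CHANNEL").unwrap_or("dev");
    let hash = option_env!("CFG_VER_HASH").unwrap_or("no-hash");
    format!("{} ({}) [{}]", release, hash, channel)
}

fn main() {
    println!("{}", version_string());
}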
 
 // crate_rule(build,
@@ -444,19 +522,31 @@ pub fn rustc(build: &Build, target: &str, compiler: &Compiler) {
 //            "build-crate-rustc-main",
 //            compile::rustc_link)
 //     .dep(|s| s.name("libtest-link"));
 //            "build-crate-rustc-main",
 //            compile::rustc_link)
 //     .dep(|s| s.name("libtest-link"));
-/// Same as `std_link`, only for librustc
-pub fn rustc_link(build: &Build,
-                  compiler: &Compiler,
-                  target_compiler: &Compiler,
-                  target: &str) {
-    println!("Copying stage{} rustc from stage{} ({} -> {} / {})",
-             target_compiler.stage,
-             compiler.stage,
-             compiler.host,
-             target_compiler.host,
-             target);
-    add_to_sysroot(&build.sysroot_libdir(target_compiler, target),
-                   &librustc_stamp(build, compiler, target));
+#[derive(Serialize)]
+pub struct RustcLink<'a> {
+    pub compiler: Compiler<'a>,
+    pub target_compiler: Compiler<'a>,
+    pub target: &'a str,
+}
+
+impl<'a> Step<'a> for RustcLink<'a> {
+    type Output = ();
+
+    /// Same as `std_link`, only for librustc
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let compiler = self.compiler;
+        let target_compiler = self.target_compiler;
+        let target = self.target;
+        println!("Copying stage{} rustc from stage{} ({} -> {} / {})",
+                 target_compiler.stage,
+                 compiler.stage,
+                 compiler.host,
+                 target_compiler.host,
+                 target);
+        add_to_sysroot(&build.sysroot_libdir(target_compiler, target),
+                       &librustc_stamp(build, compiler, target));
+    }
 }
 
 /// Cargo's output path for the standard library in a given stage, compiled
@@ -485,10 +575,28 @@ fn compiler_file(compiler: &Path, file: &str) -> PathBuf {
 
 // rules.build("create-sysroot", "path/to/nowhere")
 //      .run(move |s| compile::create_sysroot(build, &s.compiler()));
 
 // rules.build("create-sysroot", "path/to/nowhere")
 //      .run(move |s| compile::create_sysroot(build, &s.compiler()));
-pub fn create_sysroot(build: &Build, compiler: &Compiler) {
-    let sysroot = build.sysroot(compiler);
-    let _ = fs::remove_dir_all(&sysroot);
-    t!(fs::create_dir_all(&sysroot));
+
+#[derive(Serialize)]
+pub struct Sysroot<'a> {
+    pub compiler: Compiler<'a>,
+}
+
+impl<'a> Step<'a> for Sysroot<'a> {
+    type Output = ();
+
+    /// Returns the sysroot for the `compiler` specified that *this build system
+    /// generates*.
+    ///
+    /// That is, the sysroot for the stage0 compiler is not what the compiler
+    /// thinks it is by default, but it's the same as the default for stages
+    /// 1-3.
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let compiler = self.compiler;
+        let sysroot = build.sysroot(compiler);
+        let _ = fs::remove_dir_all(&sysroot);
+        t!(fs::create_dir_all(&sysroot));
+    }
 }
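As a point of reference for the doc comment above: the sysroot managed here is a per-stage directory under the build output, not whatever sysroot the stage0 compiler would report on its own. The layout in this sketch (build/<host>/stage<N>) is an assumption for illustration only; the real value comes from Build::sysroot.

use std::path::{Path, PathBuf};

// Hypothetical per-stage sysroot path, purely to make the layout concrete.
fn sysroot_path(out: &Path, host: &str, stage: u32) -> PathBuf {
    out.join(host).join(format!("stage{}", stage))
}

fn main() {
    let p = sysroot_path(Path::new("build"), "x86_64-unknown-linux-gnu", 1);
    println!("{}", p.display());
}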
 
 // the compiler with no target libraries ready to go
@@ -504,54 +612,68 @@ pub fn create_sysroot(build: &Build, compiler: &Compiler) {
 //          }
 //      })
 //      .run(move |s| compile::assemble_rustc(build, s.stage, s.target));
-/// Prepare a new compiler from the artifacts in `stage`
-///
-/// This will assemble a compiler in `build/$host/stage$stage`. The compiler
-/// must have been previously produced by the `stage - 1` build.build
-/// compiler.
-pub fn assemble_rustc(build: &Build, stage: u32, host: &str) {
-    // nothing to do in stage0
-    if stage == 0 {
-        return
-    }
 
 
-    println!("Copying stage{} compiler ({})", stage, host);
+#[derive(Serialize)]
+pub struct Assemble<'a> {
+    pub stage: u32,
+    pub host: &'a str,
+}
 
 
-    // The compiler that we're assembling
-    let target_compiler = Compiler::new(stage, host);
+impl<'a> Step<'a> for Assemble<'a> {
+    type Output = ();
+
+    /// Prepare a new compiler from the artifacts in `stage`
+    ///
+    /// This will assemble a compiler in `build/$host/stage$stage`. The compiler
+    /// must have been previously produced by the `stage - 1` build.build
+    /// compiler.
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let stage = self.stage;
+        let host = self.host;
+        // nothing to do in stage0
+        if stage == 0 {
+            return
+        }
+
+        println!("Copying stage{} compiler ({})", stage, host);
 
 
-    // The compiler that compiled the compiler we're assembling
-    let build_compiler = Compiler::new(stage - 1, &build.build);
+        // The compiler that we're assembling
+        let target_compiler = Compiler::new(stage, host);
 
 
-    // Link in all dylibs to the libdir
-    let sysroot = build.sysroot(&target_compiler);
-    let sysroot_libdir = sysroot.join(libdir(host));
-    t!(fs::create_dir_all(&sysroot_libdir));
-    let src_libdir = build.sysroot_libdir(&build_compiler, host);
-    for f in t!(fs::read_dir(&src_libdir)).map(|f| t!(f)) {
-        let filename = f.file_name().into_string().unwrap();
-        if is_dylib(&filename) {
-            copy(&f.path(), &sysroot_libdir.join(&filename));
+        // The compiler that compiled the compiler we're assembling
+        let build_compiler = Compiler::new(stage - 1, &build.build);
+
+        // Link in all dylibs to the libdir
+        let sysroot = build.sysroot(&target_compiler);
+        let sysroot_libdir = sysroot.join(libdir(host));
+        t!(fs::create_dir_all(&sysroot_libdir));
+        let src_libdir = build.sysroot_libdir(&build_compiler, host);
+        for f in t!(fs::read_dir(&src_libdir)).map(|f| t!(f)) {
+            let filename = f.file_name().into_string().unwrap();
+            if is_dylib(&filename) {
+                copy(&f.path(), &sysroot_libdir.join(&filename));
+            }
         }
-    }
 
 
-    let out_dir = build.cargo_out(&build_compiler, Mode::Librustc, host);
-
-    // Link the compiler binary itself into place
-    let rustc = out_dir.join(exe("rustc", host));
-    let bindir = sysroot.join("bin");
-    t!(fs::create_dir_all(&bindir));
-    let compiler = build.compiler_path(&target_compiler);
-    let _ = fs::remove_file(&compiler);
-    copy(&rustc, &compiler);
-
-    // See if rustdoc exists to link it into place
-    let rustdoc = exe("rustdoc", host);
-    let rustdoc_src = out_dir.join(&rustdoc);
-    let rustdoc_dst = bindir.join(&rustdoc);
-    if fs::metadata(&rustdoc_src).is_ok() {
-        let _ = fs::remove_file(&rustdoc_dst);
-        copy(&rustdoc_src, &rustdoc_dst);
+        let out_dir = build.cargo_out(&build_compiler, Mode::Librustc, host);
+
+        // Link the compiler binary itself into place
+        let rustc = out_dir.join(exe("rustc", host));
+        let bindir = sysroot.join("bin");
+        t!(fs::create_dir_all(&bindir));
+        let compiler = build.compiler_path(&target_compiler);
+        let _ = fs::remove_file(&compiler);
+        copy(&rustc, &compiler);
+
+        // See if rustdoc exists to link it into place
+        let rustdoc = exe("rustdoc", host);
+        let rustdoc_src = out_dir.join(&rustdoc);
+        let rustdoc_dst = bindir.join(&rustdoc);
+        if fs::metadata(&rustdoc_src).is_ok() {
+            let _ = fs::remove_file(&rustdoc_dst);
+            copy(&rustdoc_src, &rustdoc_dst);
+        }
     }
 }
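The dylib filter in the loop above is bootstrap's is_dylib helper. What follows is a self-contained, extension-based stand-in showing the same copy-everything-that-looks-like-a-dylib shape; the paths in main are placeholders and the real code uses the t!/copy helpers instead of plain std calls.

use std::fs;
use std::io;
use std::path::Path;

// Extension-based stand-in for an `is_dylib`-style check.
fn looks_like_dylib(name: &str) -> bool {
    name.ends_with(".so") || name.ends_with(".dylib") || name.ends_with(".dll")
}

// Copy every dynamic library from `src` into `dst`, mirroring the shape of the
// sysroot-assembly loop above.
fn copy_dylibs(src: &Path, dst: &Path) -> io::Result<()> {
    fs::create_dir_all(dst)?;
    for entry in fs::read_dir(src)? {
        let entry = entry?;
        let name = entry.file_name();
        if let Some(s) = name.to_str() {
            if looks_like_dylib(s) {
                fs::copy(entry.path(), dst.join(s))?;
            }
        }
    }
    Ok(())
}

fn main() -> io::Result<()> {
    copy_dylibs(Path::new("stage0-libdir"), Path::new("stage1-sysroot/lib"))
}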
 
@@ -574,160 +696,6 @@ fn add_to_sysroot(sysroot_dst: &Path, stamp: &Path) {
     }
 }
 
-//// ========================================================================
-//// Build tools
-////
-//// Tools used during the build system but not shipped
-//// "pseudo rule" which represents completely cleaning out the tools dir in
-//// one stage. This needs to happen whenever a dependency changes (e.g.
-//// libstd, libtest, librustc) and all of the tool compilations above will
-//// be sequenced after this rule.
-//rules.build("maybe-clean-tools", "path/to/nowhere")
-//     .after("librustc-tool")
-//     .after("libtest-tool")
-//     .after("libstd-tool");
-//
-//rules.build("librustc-tool", "path/to/nowhere")
-//     .dep(|s| s.name("librustc"))
-//     .run(move |s| compile::maybe_clean_tools(build, s.stage, s.target, Mode::Librustc));
-//rules.build("libtest-tool", "path/to/nowhere")
-//     .dep(|s| s.name("libtest"))
-//     .run(move |s| compile::maybe_clean_tools(build, s.stage, s.target, Mode::Libtest));
-//rules.build("libstd-tool", "path/to/nowhere")
-//     .dep(|s| s.name("libstd"))
-//     .run(move |s| compile::maybe_clean_tools(build, s.stage, s.target, Mode::Libstd));
-//
-/// Build a tool in `src/tools`
-///
-/// This will build the specified tool with the specified `host` compiler in
-/// `stage` into the normal cargo output directory.
-pub fn maybe_clean_tools(build: &Build, stage: u32, target: &str, mode: Mode) {
-    let compiler = Compiler::new(stage, &build.build);
-
-    let stamp = match mode {
-        Mode::Libstd => libstd_stamp(build, &compiler, target),
-        Mode::Libtest => libtest_stamp(build, &compiler, target),
-        Mode::Librustc => librustc_stamp(build, &compiler, target),
-        _ => panic!(),
-    };
-    let out_dir = build.cargo_out(&compiler, Mode::Tool, target);
-    build.clear_if_dirty(&out_dir, &stamp);
-}
-
-
-// rules.build("tool-rustbook", "src/tools/rustbook")
-//      .dep(|s| s.name("maybe-clean-tools"))
-//      .dep(|s| s.name("librustc-tool"))
-//      .run(move |s| compile::tool(build, s.stage, s.target, "rustbook"));
-// rules.build("tool-error-index", "src/tools/error_index_generator")
-//      .dep(|s| s.name("maybe-clean-tools"))
-//      .dep(|s| s.name("librustc-tool"))
-//      .run(move |s| compile::tool(build, s.stage, s.target, "error_index_generator"));
-// rules.build("tool-unstable-book-gen", "src/tools/unstable-book-gen")
-//      .dep(|s| s.name("maybe-clean-tools"))
-//      .dep(|s| s.name("libstd-tool"))
-//      .run(move |s| compile::tool(build, s.stage, s.target, "unstable-book-gen"));
-// rules.build("tool-tidy", "src/tools/tidy")
-//      .dep(|s| s.name("maybe-clean-tools"))
-//      .dep(|s| s.name("libstd-tool"))
-//      .run(move |s| compile::tool(build, s.stage, s.target, "tidy"));
-// rules.build("tool-linkchecker", "src/tools/linkchecker")
-//      .dep(|s| s.name("maybe-clean-tools"))
-//      .dep(|s| s.name("libstd-tool"))
-//      .run(move |s| compile::tool(build, s.stage, s.target, "linkchecker"));
-// rules.build("tool-cargotest", "src/tools/cargotest")
-//      .dep(|s| s.name("maybe-clean-tools"))
-//      .dep(|s| s.name("libstd-tool"))
-//      .run(move |s| compile::tool(build, s.stage, s.target, "cargotest"));
-// rules.build("tool-compiletest", "src/tools/compiletest")
-//      .dep(|s| s.name("maybe-clean-tools"))
-//      .dep(|s| s.name("libtest-tool"))
-//      .run(move |s| compile::tool(build, s.stage, s.target, "compiletest"));
-// rules.build("tool-build-manifest", "src/tools/build-manifest")
-//      .dep(|s| s.name("maybe-clean-tools"))
-//      .dep(|s| s.name("libstd-tool"))
-//      .run(move |s| compile::tool(build, s.stage, s.target, "build-manifest"));
-// rules.build("tool-remote-test-server", "src/tools/remote-test-server")
-//      .dep(|s| s.name("maybe-clean-tools"))
-//      .dep(|s| s.name("libstd-tool"))
-//      .run(move |s| compile::tool(build, s.stage, s.target, "remote-test-server"));
-// rules.build("tool-remote-test-client", "src/tools/remote-test-client")
-//      .dep(|s| s.name("maybe-clean-tools"))
-//      .dep(|s| s.name("libstd-tool"))
-//      .run(move |s| compile::tool(build, s.stage, s.target, "remote-test-client"));
-// rules.build("tool-rust-installer", "src/tools/rust-installer")
-//      .dep(|s| s.name("maybe-clean-tools"))
-//      .dep(|s| s.name("libstd-tool"))
-//      .run(move |s| compile::tool(build, s.stage, s.target, "rust-installer"));
-// rules.build("tool-cargo", "src/tools/cargo")
-//      .host(true)
-//      .default(build.config.extended)
-//      .dep(|s| s.name("maybe-clean-tools"))
-//      .dep(|s| s.name("libstd-tool"))
-//      .dep(|s| s.stage(0).host(s.target).name("openssl"))
-//      .dep(move |s| {
-//          // Cargo depends on procedural macros, which requires a full host
-//          // compiler to be available, so we need to depend on that.
-//          s.name("librustc-link")
-//           .target(&build.build)
-//           .host(&build.build)
-//      })
-//      .run(move |s| compile::tool(build, s.stage, s.target, "cargo"));
-// rules.build("tool-rls", "src/tools/rls")
-//      .host(true)
-//      .default(build.config.extended)
-//      .dep(|s| s.name("librustc-tool"))
-//      .dep(|s| s.stage(0).host(s.target).name("openssl"))
-//      .dep(move |s| {
-//          // rls, like cargo, uses procedural macros
-//          s.name("librustc-link")
-//           .target(&build.build)
-//           .host(&build.build)
-//      })
-//      .run(move |s| compile::tool(build, s.stage, s.target, "rls"));
-//
-
-/// Build a tool in `src/tools`
-///
-/// This will build the specified tool with the specified `host` compiler in
-/// `stage` into the normal cargo output directory.
-pub fn tool(build: &Build, stage: u32, target: &str, tool: &str) {
-    let _folder = build.fold_output(|| format!("stage{}-{}", stage, tool));
-    println!("Building stage{} tool {} ({})", stage, tool, target);
-
-    let compiler = Compiler::new(stage, &build.build);
-
-    let mut cargo = build.cargo(&compiler, Mode::Tool, target, "build");
-    let dir = build.src.join("src/tools").join(tool);
-    cargo.arg("--manifest-path").arg(dir.join("Cargo.toml"));
-
-    // We don't want to build tools dynamically as they'll be running across
-    // stages and such and it's just easier if they're not dynamically linked.
-    cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1");
-
-    if let Some(dir) = build.openssl_install_dir(target) {
-        cargo.env("OPENSSL_STATIC", "1");
-        cargo.env("OPENSSL_DIR", dir);
-        cargo.env("LIBZ_SYS_STATIC", "1");
-    }
-
-    cargo.env("CFG_RELEASE_CHANNEL", &build.config.channel);
-
-    let info = GitInfo::new(&dir);
-    if let Some(sha) = info.sha() {
-        cargo.env("CFG_COMMIT_HASH", sha);
-    }
-    if let Some(sha_short) = info.sha_short() {
-        cargo.env("CFG_SHORT_COMMIT_HASH", sha_short);
-    }
-    if let Some(date) = info.commit_date() {
-        cargo.env("CFG_COMMIT_DATE", date);
-    }
-
-    build.run(&mut cargo);
-}
-
-
 // Avoiding a dependency on winapi to keep compile times down
 #[cfg(unix)]
 fn stderr_isatty() -> bool {
index 2cf3ca73952ac36b42accd5e22d9309f035df926..8aa9ad7021e6f632511b02a6e8b77ea00232fd2c 100644 (file)
@@ -67,46 +67,61 @@ fn rust_installer(build: &Build) -> Command {
 //      .dep(|s| s.name("default:doc"))
 //      .dep(move |s| tool_rust_installer(build, s))
 //      .run(move |s| dist::docs(build, s.stage, s.target));
 //      .dep(|s| s.name("default:doc"))
 //      .dep(move |s| tool_rust_installer(build, s))
 //      .run(move |s| dist::docs(build, s.stage, s.target));
-/// Builds the `rust-docs` installer component.
-///
-/// Slurps up documentation from the `stage`'s `host`.
-pub fn docs(build: &Build, stage: u32, host: &str) {
-    println!("Dist docs stage{} ({})", stage, host);
-    if !build.config.docs {
-        println!("\tskipping - docs disabled");
-        return
-    }
 
 
-    let name = pkgname(build, "rust-docs");
-    let image = tmpdir(build).join(format!("{}-{}-image", name, host));
-    let _ = fs::remove_dir_all(&image);
+#[derive(Serialize)]
+pub struct Docs<'a> {
+    stage: u32,
+    host: &'a str,
+}
 
 
-    let dst = image.join("share/doc/rust/html");
-    t!(fs::create_dir_all(&dst));
-    let src = build.out.join(host).join("doc");
-    cp_r(&src, &dst);
+impl<'a> Step<'a> for Docs<'a> {
+    type Output = ();
+
+    /// Builds the `rust-docs` installer component.
+    ///
+    /// Slurps up documentation from the `stage`'s `host`.
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let stage = self.stage;
+        let host = self.host;
+
+        println!("Dist docs stage{} ({})", stage, host);
+        if !build.config.docs {
+            println!("\tskipping - docs disabled");
+            return
+        }
 
 
-    let mut cmd = rust_installer(build);
-    cmd.arg("generate")
-       .arg("--product-name=Rust-Documentation")
-       .arg("--rel-manifest-dir=rustlib")
-       .arg("--success-message=Rust-documentation-is-installed.")
-       .arg("--image-dir").arg(&image)
-       .arg("--work-dir").arg(&tmpdir(build))
-       .arg("--output-dir").arg(&distdir(build))
-       .arg(format!("--package-name={}-{}", name, host))
-       .arg("--component-name=rust-docs")
-       .arg("--legacy-manifest-dirs=rustlib,cargo")
-       .arg("--bulk-dirs=share/doc/rust/html");
-    build.run(&mut cmd);
-    t!(fs::remove_dir_all(&image));
+        let name = pkgname(build, "rust-docs");
+        let image = tmpdir(build).join(format!("{}-{}-image", name, host));
+        let _ = fs::remove_dir_all(&image);
 
 
-    // As part of this step, *also* copy the docs directory to a directory which
-    // buildbot typically uploads.
-    if host == build.build {
-        let dst = distdir(build).join("doc").join(build.rust_package_vers());
+        let dst = image.join("share/doc/rust/html");
         t!(fs::create_dir_all(&dst));
+        let src = build.out.join(host).join("doc");
         cp_r(&src, &dst);
+
+        let mut cmd = rust_installer(build);
+        cmd.arg("generate")
+           .arg("--product-name=Rust-Documentation")
+           .arg("--rel-manifest-dir=rustlib")
+           .arg("--success-message=Rust-documentation-is-installed.")
+           .arg("--image-dir").arg(&image)
+           .arg("--work-dir").arg(&tmpdir(build))
+           .arg("--output-dir").arg(&distdir(build))
+           .arg(format!("--package-name={}-{}", name, host))
+           .arg("--component-name=rust-docs")
+           .arg("--legacy-manifest-dirs=rustlib,cargo")
+           .arg("--bulk-dirs=share/doc/rust/html");
+        build.run(&mut cmd);
+        t!(fs::remove_dir_all(&image));
+
+        // As part of this step, *also* copy the docs directory to a directory which
+        // buildbot typically uploads.
+        if host == build.build {
+            let dst = distdir(build).join("doc").join(build.rust_package_vers());
+            t!(fs::create_dir_all(&dst));
+            cp_r(&src, &dst);
+        }
     }
 }
 
@@ -245,37 +260,48 @@ fn copy_to_folder(src: &Path, dest_folder: &Path) {
 //              dist::mingw(build, s.target)
 //          }
 //      });
-//
-/// Build the `rust-mingw` installer component.
-///
-/// This contains all the bits and pieces to run the MinGW Windows targets
-/// without any extra installed software (e.g. we bundle gcc, libraries, etc).
-pub fn mingw(build: &Build, host: &str) {
-    println!("Dist mingw ({})", host);
-    let name = pkgname(build, "rust-mingw");
-    let image = tmpdir(build).join(format!("{}-{}-image", name, host));
-    let _ = fs::remove_dir_all(&image);
-    t!(fs::create_dir_all(&image));
 
 
-    // The first argument is a "temporary directory" which is just
-    // thrown away (this contains the runtime DLLs included in the rustc package
-    // above) and the second argument is where to place all the MinGW components
-    // (which is what we want).
-    make_win_dist(&tmpdir(build), &image, host, &build);
+#[derive(Serialize)]
+pub struct Mingw<'a> {
+    host: &'a str,
+}
 
 
-    let mut cmd = rust_installer(build);
-    cmd.arg("generate")
-       .arg("--product-name=Rust-MinGW")
-       .arg("--rel-manifest-dir=rustlib")
-       .arg("--success-message=Rust-MinGW-is-installed.")
-       .arg("--image-dir").arg(&image)
-       .arg("--work-dir").arg(&tmpdir(build))
-       .arg("--output-dir").arg(&distdir(build))
-       .arg(format!("--package-name={}-{}", name, host))
-       .arg("--component-name=rust-mingw")
-       .arg("--legacy-manifest-dirs=rustlib,cargo");
-    build.run(&mut cmd);
-    t!(fs::remove_dir_all(&image));
+impl<'a> Step<'a> for Mingw<'a> {
+    type Output = ();
+
+    /// Build the `rust-mingw` installer component.
+    ///
+    /// This contains all the bits and pieces to run the MinGW Windows targets
+    /// without any extra installed software (e.g. we bundle gcc, libraries, etc).
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let host = self.host;
+        println!("Dist mingw ({})", host);
+        let name = pkgname(build, "rust-mingw");
+        let image = tmpdir(build).join(format!("{}-{}-image", name, host));
+        let _ = fs::remove_dir_all(&image);
+        t!(fs::create_dir_all(&image));
+
+        // The first argument is a "temporary directory" which is just
+        // thrown away (this contains the runtime DLLs included in the rustc package
+        // above) and the second argument is where to place all the MinGW components
+        // (which is what we want).
+        make_win_dist(&tmpdir(build), &image, host, &build);
+
+        let mut cmd = rust_installer(build);
+        cmd.arg("generate")
+           .arg("--product-name=Rust-MinGW")
+           .arg("--rel-manifest-dir=rustlib")
+           .arg("--success-message=Rust-MinGW-is-installed.")
+           .arg("--image-dir").arg(&image)
+           .arg("--work-dir").arg(&tmpdir(build))
+           .arg("--output-dir").arg(&distdir(build))
+           .arg(format!("--package-name={}-{}", name, host))
+           .arg("--component-name=rust-mingw")
+           .arg("--legacy-manifest-dirs=rustlib,cargo");
+        build.run(&mut cmd);
+        t!(fs::remove_dir_all(&image));
+    }
 }
 
 // rules.dist("dist-rustc", "src/librustc")
@@ -285,138 +311,166 @@ pub fn mingw(build: &Build, host: &str) {
 //      .default(true)
 //      .dep(move |s| tool_rust_installer(build, s))
 //      .run(move |s| dist::rustc(build, s.stage, s.target));
-/// Creates the `rustc` installer component.
-pub fn rustc(build: &Build, stage: u32, host: &str) {
-    println!("Dist rustc stage{} ({})", stage, host);
-    let name = pkgname(build, "rustc");
-    let image = tmpdir(build).join(format!("{}-{}-image", name, host));
-    let _ = fs::remove_dir_all(&image);
-    let overlay = tmpdir(build).join(format!("{}-{}-overlay", name, host));
-    let _ = fs::remove_dir_all(&overlay);
-
-    // Prepare the rustc "image", what will actually end up getting installed
-    prepare_image(build, stage, host, &image);
-
-    // Prepare the overlay which is part of the tarball but won't actually be
-    // installed
-    let cp = |file: &str| {
-        install(&build.src.join(file), &overlay, 0o644);
-    };
-    cp("COPYRIGHT");
-    cp("LICENSE-APACHE");
-    cp("LICENSE-MIT");
-    cp("README.md");
-    // tiny morsel of metadata is used by rust-packaging
-    let version = build.rust_version();
-    t!(t!(File::create(overlay.join("version"))).write_all(version.as_bytes()));
-
-    // On MinGW we've got a few runtime DLL dependencies that we need to
-    // include. The first argument to this script is where to put these DLLs
-    // (the image we're creating), and the second argument is a junk directory
-    // to ignore all other MinGW stuff the script creates.
-    //
-    // On 32-bit MinGW we're always including a DLL which needs some extra
-    // licenses to distribute. On 64-bit MinGW we don't actually distribute
-    // anything requiring us to distribute a license, but it's likely the
-    // install will *also* include the rust-mingw package, which also needs
-    // licenses, so to be safe we just include it here in all MinGW packages.
-    if host.contains("pc-windows-gnu") {
-        make_win_dist(&image, &tmpdir(build), host, build);
-
-        let dst = image.join("share/doc");
-        t!(fs::create_dir_all(&dst));
-        cp_r(&build.src.join("src/etc/third-party"), &dst);
-    }
 
 
-    // Finally, wrap everything up in a nice tarball!
-    let mut cmd = rust_installer(build);
-    cmd.arg("generate")
-       .arg("--product-name=Rust")
-       .arg("--rel-manifest-dir=rustlib")
-       .arg("--success-message=Rust-is-ready-to-roll.")
-       .arg("--image-dir").arg(&image)
-       .arg("--work-dir").arg(&tmpdir(build))
-       .arg("--output-dir").arg(&distdir(build))
-       .arg("--non-installed-overlay").arg(&overlay)
-       .arg(format!("--package-name={}-{}", name, host))
-       .arg("--component-name=rustc")
-       .arg("--legacy-manifest-dirs=rustlib,cargo");
-    build.run(&mut cmd);
-    t!(fs::remove_dir_all(&image));
-    t!(fs::remove_dir_all(&overlay));
-
-    fn prepare_image(build: &Build, stage: u32, host: &str, image: &Path) {
-        let src = build.sysroot(&Compiler::new(stage, host));
-        let libdir = libdir(host);
-
-        // Copy rustc/rustdoc binaries
-        t!(fs::create_dir_all(image.join("bin")));
-        cp_r(&src.join("bin"), &image.join("bin"));
-
-        // Copy runtime DLLs needed by the compiler
-        if libdir != "bin" {
-            for entry in t!(src.join(libdir).read_dir()).map(|e| t!(e)) {
-                let name = entry.file_name();
-                if let Some(s) = name.to_str() {
-                    if is_dylib(s) {
-                        install(&entry.path(), &image.join(libdir), 0o644);
-                    }
-                }
-            }
-        }
-
-        // Man pages
-        t!(fs::create_dir_all(image.join("share/man/man1")));
-        cp_r(&build.src.join("man"), &image.join("share/man/man1"));
-
-        // Debugger scripts
-        debugger_scripts(build, &image, host);
+#[derive(Serialize)]
+pub struct Rustc<'a> {
+    stage: u32,
+    host: &'a str,
+}
 
 
-        // Misc license info
+impl<'a> Step<'a> for Rustc<'a> {
+    type Output = ();
+
+    /// Creates the `rustc` installer component.
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let stage = self.stage;
+        let host = self.host;
+        println!("Dist rustc stage{} ({})", stage, host);
+        let name = pkgname(build, "rustc");
+        let image = tmpdir(build).join(format!("{}-{}-image", name, host));
+        let _ = fs::remove_dir_all(&image);
+        let overlay = tmpdir(build).join(format!("{}-{}-overlay", name, host));
+        let _ = fs::remove_dir_all(&overlay);
+
+        // Prepare the rustc "image", what will actually end up getting installed
+        prepare_image(build, stage, host, &image);
+
+        // Prepare the overlay which is part of the tarball but won't actually be
+        // installed
         let cp = |file: &str| {
-            install(&build.src.join(file), &image.join("share/doc/rust"), 0o644);
+            install(&build.src.join(file), &overlay, 0o644);
         };
         cp("COPYRIGHT");
         cp("LICENSE-APACHE");
         cp("LICENSE-MIT");
         cp("README.md");
+        // This tiny morsel of metadata is used by rust-packaging.
+        let version = build.rust_version();
+        t!(t!(File::create(overlay.join("version"))).write_all(version.as_bytes()));
+
+        // On MinGW we've got a few runtime DLL dependencies that we need to
+        // include. The first argument to this script is where to put these DLLs
+        // (the image we're creating), and the second argument is a junk directory
+        // to ignore all other MinGW stuff the script creates.
+        //
+        // On 32-bit MinGW we're always including a DLL which needs some extra
+        // licenses to distribute. On 64-bit MinGW we don't actually distribute
+        // anything requiring us to distribute a license, but it's likely the
+        // install will *also* include the rust-mingw package, which also needs
+        // licenses, so to be safe we just include it here in all MinGW packages.
+        if host.contains("pc-windows-gnu") {
+            make_win_dist(&image, &tmpdir(build), host, build);
+
+            let dst = image.join("share/doc");
+            t!(fs::create_dir_all(&dst));
+            cp_r(&build.src.join("src/etc/third-party"), &dst);
+        }
+
+        // Finally, wrap everything up in a nice tarball!
+        let mut cmd = rust_installer(build);
+        cmd.arg("generate")
+           .arg("--product-name=Rust")
+           .arg("--rel-manifest-dir=rustlib")
+           .arg("--success-message=Rust-is-ready-to-roll.")
+           .arg("--image-dir").arg(&image)
+           .arg("--work-dir").arg(&tmpdir(build))
+           .arg("--output-dir").arg(&distdir(build))
+           .arg("--non-installed-overlay").arg(&overlay)
+           .arg(format!("--package-name={}-{}", name, host))
+           .arg("--component-name=rustc")
+           .arg("--legacy-manifest-dirs=rustlib,cargo");
+        build.run(&mut cmd);
+        t!(fs::remove_dir_all(&image));
+        t!(fs::remove_dir_all(&overlay));
+
+        fn prepare_image(build: &Build, stage: u32, host: &str, image: &Path) {
+            let src = build.sysroot(&Compiler::new(stage, host));
+            let libdir = libdir(host);
+
+            // Copy rustc/rustdoc binaries
+            t!(fs::create_dir_all(image.join("bin")));
+            cp_r(&src.join("bin"), &image.join("bin"));
+
+            // Copy runtime DLLs needed by the compiler
+            if libdir != "bin" {
+                for entry in t!(src.join(libdir).read_dir()).map(|e| t!(e)) {
+                    let name = entry.file_name();
+                    if let Some(s) = name.to_str() {
+                        if is_dylib(s) {
+                            install(&entry.path(), &image.join(libdir), 0o644);
+                        }
+                    }
+                }
+            }
+
+            // Man pages
+            t!(fs::create_dir_all(image.join("share/man/man1")));
+            cp_r(&build.src.join("man"), &image.join("share/man/man1"));
+
+            // Debugger scripts
+            debugger_scripts(build, &image, host);
+
+            // Misc license info
+            let cp = |file: &str| {
+                install(&build.src.join(file), &image.join("share/doc/rust"), 0o644);
+            };
+            cp("COPYRIGHT");
+            cp("LICENSE-APACHE");
+            cp("LICENSE-MIT");
+            cp("README.md");
+        }
     }
 }
 
+
+
 //rules.test("debugger-scripts", "src/etc/lldb_batchmode.py")
 //     .run(move |s| dist::debugger_scripts(build, &build.sysroot(&s.compiler()),
 //                                     s.target));
 //rules.test("debugger-scripts", "src/etc/lldb_batchmode.py")
 //     .run(move |s| dist::debugger_scripts(build, &build.sysroot(&s.compiler()),
 //                                     s.target));
-/// Copies debugger scripts for `host` into the `sysroot` specified.
-pub fn debugger_scripts(build: &Build,
-                        sysroot: &Path,
-                        host: &str) {
-    let dst = sysroot.join("lib/rustlib/etc");
-    t!(fs::create_dir_all(&dst));
-    let cp_debugger_script = |file: &str| {
-        install(&build.src.join("src/etc/").join(file), &dst, 0o644);
-    };
-    if host.contains("windows-msvc") {
-        // windbg debugger scripts
-        install(&build.src.join("src/etc/rust-windbg.cmd"), &sysroot.join("bin"),
-            0o755);
-
-        cp_debugger_script("natvis/liballoc.natvis");
-        cp_debugger_script("natvis/libcore.natvis");
-    } else {
-        cp_debugger_script("debugger_pretty_printers_common.py");
 
 
-        // gdb debugger scripts
-        install(&build.src.join("src/etc/rust-gdb"), &sysroot.join("bin"),
-                0o755);
+#[derive(Serialize)]
+pub struct DebuggerScripts<'a> {
+    sysroot: &'a Path,
+    host: &'a str,
+}
 
 
-        cp_debugger_script("gdb_load_rust_pretty_printers.py");
-        cp_debugger_script("gdb_rust_pretty_printing.py");
+impl<'a> Step<'a> for DebuggerScripts<'a> {
+    type Output = ();
 
 
-        // lldb debugger scripts
-        install(&build.src.join("src/etc/rust-lldb"), &sysroot.join("bin"),
+    /// Copies debugger scripts for `host` into the `sysroot` specified.
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let host = self.host;
+        let sysroot = self.sysroot;
+        let dst = sysroot.join("lib/rustlib/etc");
+        t!(fs::create_dir_all(&dst));
+        let cp_debugger_script = |file: &str| {
+            install(&build.src.join("src/etc/").join(file), &dst, 0o644);
+        };
+        if host.contains("windows-msvc") {
+            // windbg debugger scripts
+            install(&build.src.join("src/etc/rust-windbg.cmd"), &sysroot.join("bin"),
                 0o755);
 
-        cp_debugger_script("lldb_rust_formatters.py");
+            cp_debugger_script("natvis/liballoc.natvis");
+            cp_debugger_script("natvis/libcore.natvis");
+        } else {
+            cp_debugger_script("debugger_pretty_printers_common.py");
+
+            // gdb debugger scripts
+            install(&build.src.join("src/etc/rust-gdb"), &sysroot.join("bin"),
+                    0o755);
+
+            cp_debugger_script("gdb_load_rust_pretty_printers.py");
+            cp_debugger_script("gdb_rust_pretty_printing.py");
+
+            // lldb debugger scripts
+            install(&build.src.join("src/etc/rust-lldb"), &sysroot.join("bin"),
+                    0o755);
+
+            cp_debugger_script("lldb_rust_formatters.py");
+        }
     }
 }
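Condensed view of the branch above, purely for orientation: pick a set of debugger support files based on the host triple. The helper name and return shape here are hypothetical, and the file list is abridged from the installs shown above.

// Hypothetical condensation of the windows-msvc vs. gdb/lldb branch above.
fn debugger_scripts_for(host: &str) -> &'static [&'static str] {
    if host.contains("windows-msvc") {
        &["natvis/liballoc.natvis", "natvis/libcore.natvis"]
    } else {
        &[
            "debugger_pretty_printers_common.py",
            "gdb_load_rust_pretty_printers.py",
            "gdb_rust_pretty_printing.py",
            "lldb_rust_formatters.py",
        ]
    }
}

fn main() {
    for file in debugger_scripts_for("x86_64-pc-windows-msvc") {
        println!("install src/etc/{}", file);
    }
}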
 
@@ -435,18 +489,6 @@ pub fn debugger_scripts(build: &Build,
 //      .only_host_build(true)
 //      .dep(move |s| tool_rust_installer(build, s))
 //      .run(move |s| dist::std(build, &s.compiler(), s.target));
-/// Creates the `rust-std` installer component as compiled by `compiler` for the
-/// target `target`.
-pub fn std(build: &Build, compiler: &Compiler, target: &str) {
-    println!("Dist std stage{} ({} -> {})", compiler.stage, compiler.host,
-             target);
-
-    // The only true set of target libraries came from the build triple, so
-    // let's reduce redundant work by only producing archives from that host.
-    if compiler.host != build.build {
-        println!("\tskipping, not a build host");
-        return
-    }
 
     let name = pkgname(build, "rust-std");
     let image = tmpdir(build).join(format!("{}-{}-image", name, target));
 
     let name = pkgname(build, "rust-std");
     let image = tmpdir(build).join(format!("{}-{}-image", name, target));
@@ -491,48 +533,62 @@ pub fn rust_src_installer(build: &Build) -> PathBuf {
 //      .only_host_build(true)
 //      .dep(move |s| tool_rust_installer(build, s))
 //      .run(move |s| dist::analysis(build, &s.compiler(), s.target));
 //      .only_host_build(true)
 //      .dep(move |s| tool_rust_installer(build, s))
 //      .run(move |s| dist::analysis(build, &s.compiler(), s.target));
-/// Creates a tarball of save-analysis metadata, if available.
-pub fn analysis(build: &Build, compiler: &Compiler, target: &str) {
-    assert!(build.config.extended);
-    println!("Dist analysis");
-
-    if compiler.host != build.build {
-        println!("\tskipping, not a build host");
-        return;
-    }
 
 
-    // Package save-analysis from stage1 if not doing a full bootstrap, as the
-    // stage2 artifacts is simply copied from stage1 in that case.
-    let compiler = if build.force_use_stage1(compiler, target) {
-        Compiler::new(1, compiler.host)
-    } else {
-        compiler.clone()
-    };
+#[derive(Serialize)]
+pub struct Analysis<'a> {
+    compiler: Compiler<'a>,
+    target: &'a str,
+}
 
 
-    let name = pkgname(build, "rust-analysis");
-    let image = tmpdir(build).join(format!("{}-{}-image", name, target));
+impl<'a> Step<'a> for Analysis<'a> {
+    type Output = ();
 
 
-    let src = build.stage_out(&compiler, Mode::Libstd).join(target).join("release").join("deps");
+    /// Creates a tarball of save-analysis metadata, if available.
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let compiler = self.compiler;
+        let target = self.target;
+        assert!(build.config.extended);
+        println!("Dist analysis");
 
 
-    let image_src = src.join("save-analysis");
-    let dst = image.join("lib/rustlib").join(target).join("analysis");
-    t!(fs::create_dir_all(&dst));
-    println!("image_src: {:?}, dst: {:?}", image_src, dst);
-    cp_r(&image_src, &dst);
+        if compiler.host != build.build {
+            println!("\tskipping, not a build host");
+            return;
+        }
 
 
-    let mut cmd = rust_installer(build);
-    cmd.arg("generate")
-       .arg("--product-name=Rust")
-       .arg("--rel-manifest-dir=rustlib")
-       .arg("--success-message=save-analysis-saved.")
-       .arg("--image-dir").arg(&image)
-       .arg("--work-dir").arg(&tmpdir(build))
-       .arg("--output-dir").arg(&distdir(build))
-       .arg(format!("--package-name={}-{}", name, target))
-       .arg(format!("--component-name=rust-analysis-{}", target))
-       .arg("--legacy-manifest-dirs=rustlib,cargo");
-    build.run(&mut cmd);
-    t!(fs::remove_dir_all(&image));
+        // Package save-analysis from stage1 if not doing a full bootstrap, as the
+        // stage2 artifacts are simply copied from stage1 in that case.
+        let compiler = if build.force_use_stage1(compiler, target) {
+            Compiler::new(1, compiler.host)
+        } else {
+            compiler.clone()
+        };
+
+        let name = pkgname(build, "rust-analysis");
+        let image = tmpdir(build).join(format!("{}-{}-image", name, target));
+
+        let src = build.stage_out(&compiler, Mode::Libstd).join(target).join("release").join("deps");
+
+        let image_src = src.join("save-analysis");
+        let dst = image.join("lib/rustlib").join(target).join("analysis");
+        t!(fs::create_dir_all(&dst));
+        println!("image_src: {:?}, dst: {:?}", image_src, dst);
+        cp_r(&image_src, &dst);
+
+        let mut cmd = rust_installer(build);
+        cmd.arg("generate")
+           .arg("--product-name=Rust")
+           .arg("--rel-manifest-dir=rustlib")
+           .arg("--success-message=save-analysis-saved.")
+           .arg("--image-dir").arg(&image)
+           .arg("--work-dir").arg(&tmpdir(build))
+           .arg("--output-dir").arg(&distdir(build))
+           .arg(format!("--package-name={}-{}", name, target))
+           .arg(format!("--component-name=rust-analysis-{}", target))
+           .arg("--legacy-manifest-dirs=rustlib,cargo");
+        build.run(&mut cmd);
+        t!(fs::remove_dir_all(&image));
+    }
 }
 
 fn copy_src_dirs(build: &Build, src_dirs: &[&str], exclude_dirs: &[&str], dst_dir: &Path) {
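
The hunk above shows the conversion applied throughout this commit: a free function such as `pub fn analysis(build: &Build, compiler: &Compiler, target: &str)` becomes a struct whose fields are the old arguments, and the old body moves unchanged into `Step::run`, which recovers `build` from the `Builder` it is handed. The sketch below is a minimal, self-contained model of that shape; `Build`, `Builder`, `Step` and `ExampleStep` here are illustrative stand-ins, not the real definitions from src/bootstrap.

    // Illustrative stand-ins for the bootstrap types; not the real API.
    struct Build {
        build: String, // the build-host triple
    }

    struct Builder<'a> {
        build: &'a Build,
    }

    // The trait shape used in this commit: an associated Output type and a
    // `run` method that consumes the step and borrows the builder.
    trait Step<'a> {
        type Output;
        fn run(self, builder: &Builder) -> Self::Output;
    }

    // The old function's arguments become the fields of a step struct.
    struct ExampleStep<'a> {
        target: &'a str,
    }

    impl<'a> Step<'a> for ExampleStep<'a> {
        type Output = ();

        fn run(self, builder: &Builder) {
            // Each converted body begins by recovering `build` from the builder.
            let build = builder.build;
            println!("ExampleStep ({}) on build host {}", self.target, build.build);
        }
    }

    fn main() {
        let build = Build { build: "x86_64-unknown-linux-gnu".to_string() };
        let builder = Builder { build: &build };
        ExampleStep { target: "x86_64-unknown-linux-gnu" }.run(&builder);
    }
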
@@ -582,69 +638,78 @@ fn filter_fn(exclude_dirs: &[&str], dir: &str, path: &Path) -> bool {
 //      .only_host_build(true)
 //      .dep(move |s| tool_rust_installer(build, s))
 //      .run(move |_| dist::rust_src(build));
-/// Creates the `rust-src` installer component
-pub fn rust_src(build: &Build) {
-    println!("Dist src");
-
-    let name = pkgname(build, "rust-src");
-    let image = tmpdir(build).join(format!("{}-image", name));
-    let _ = fs::remove_dir_all(&image);
 
-    let dst = image.join("lib/rustlib/src");
-    let dst_src = dst.join("rust");
-    t!(fs::create_dir_all(&dst_src));
-
-    // This is the reduced set of paths which will become the rust-src component
-    // (essentially libstd and all of its path dependencies)
-    let std_src_dirs = [
-        "src/build_helper",
-        "src/liballoc",
-        "src/liballoc_jemalloc",
-        "src/liballoc_system",
-        "src/libbacktrace",
-        "src/libcollections",
-        "src/libcompiler_builtins",
-        "src/libcore",
-        "src/liblibc",
-        "src/libpanic_abort",
-        "src/libpanic_unwind",
-        "src/librand",
-        "src/librustc_asan",
-        "src/librustc_lsan",
-        "src/librustc_msan",
-        "src/librustc_tsan",
-        "src/libstd",
-        "src/libstd_unicode",
-        "src/libunwind",
-        "src/rustc/compiler_builtins_shim",
-        "src/rustc/libc_shim",
-        "src/libtest",
-        "src/libterm",
-        "src/jemalloc",
-        "src/libprofiler_builtins",
-    ];
-    let std_src_dirs_exclude = [
-        "src/compiler-rt/test",
-        "src/jemalloc/test/unit",
-    ];
-
-    copy_src_dirs(build, &std_src_dirs[..], &std_src_dirs_exclude[..], &dst_src);
+#[derive(Serialize)]
+pub struct Src;
+
+impl<'a> Step<'a> for Src {
+    type Output = ();
+
+    /// Creates the `rust-src` installer component
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        println!("Dist src");
+
+        let name = pkgname(build, "rust-src");
+        let image = tmpdir(build).join(format!("{}-image", name));
+        let _ = fs::remove_dir_all(&image);
+
+        let dst = image.join("lib/rustlib/src");
+        let dst_src = dst.join("rust");
+        t!(fs::create_dir_all(&dst_src));
+
+        // This is the reduced set of paths which will become the rust-src component
+        // (essentially libstd and all of its path dependencies)
+        let std_src_dirs = [
+            "src/build_helper",
+            "src/liballoc",
+            "src/liballoc_jemalloc",
+            "src/liballoc_system",
+            "src/libbacktrace",
+            "src/libcollections",
+            "src/libcompiler_builtins",
+            "src/libcore",
+            "src/liblibc",
+            "src/libpanic_abort",
+            "src/libpanic_unwind",
+            "src/librand",
+            "src/librustc_asan",
+            "src/librustc_lsan",
+            "src/librustc_msan",
+            "src/librustc_tsan",
+            "src/libstd",
+            "src/libstd_unicode",
+            "src/libunwind",
+            "src/rustc/compiler_builtins_shim",
+            "src/rustc/libc_shim",
+            "src/libtest",
+            "src/libterm",
+            "src/jemalloc",
+            "src/libprofiler_builtins",
+        ];
+        let std_src_dirs_exclude = [
+            "src/compiler-rt/test",
+            "src/jemalloc/test/unit",
+        ];
 
-    // Create source tarball in rust-installer format
-    let mut cmd = rust_installer(build);
-    cmd.arg("generate")
-       .arg("--product-name=Rust")
-       .arg("--rel-manifest-dir=rustlib")
-       .arg("--success-message=Awesome-Source.")
-       .arg("--image-dir").arg(&image)
-       .arg("--work-dir").arg(&tmpdir(build))
-       .arg("--output-dir").arg(&distdir(build))
-       .arg(format!("--package-name={}", name))
-       .arg("--component-name=rust-src")
-       .arg("--legacy-manifest-dirs=rustlib,cargo");
-    build.run(&mut cmd);
+        copy_src_dirs(build, &std_src_dirs[..], &std_src_dirs_exclude[..], &dst_src);
+
+        // Create source tarball in rust-installer format
+        let mut cmd = rust_installer(build);
+        cmd.arg("generate")
+           .arg("--product-name=Rust")
+           .arg("--rel-manifest-dir=rustlib")
+           .arg("--success-message=Awesome-Source.")
+           .arg("--image-dir").arg(&image)
+           .arg("--work-dir").arg(&tmpdir(build))
+           .arg("--output-dir").arg(&distdir(build))
+           .arg(format!("--package-name={}", name))
+           .arg("--component-name=rust-src")
+           .arg("--legacy-manifest-dirs=rustlib,cargo");
+        build.run(&mut cmd);
 
-    t!(fs::remove_dir_all(&image));
+        t!(fs::remove_dir_all(&image));
+    }
 }
 
 const CARGO_VENDOR_VERSION: &str = "0.1.4";
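
The `Src` step above hands `copy_src_dirs` a whitelist of library source directories plus an exclude list, and the hunk header names the `filter_fn(exclude_dirs, dir, path)` predicate that enforces the excludes. The sketch below models only the exclude-list check; it is not the real `filter_fn`, whose body is not shown in this diff.

    use std::path::Path;

    // Keep a path unless it falls under one of the excluded sub-directories.
    fn keep(exclude_dirs: &[&str], path: &Path) -> bool {
        !exclude_dirs.iter().any(|dir| path.starts_with(dir))
    }

    fn main() {
        let excludes = ["src/jemalloc/test/unit", "src/compiler-rt/test"];
        assert!(keep(&excludes, Path::new("src/libstd/lib.rs")));
        assert!(!keep(&excludes, Path::new("src/jemalloc/test/unit/a_test.c")));
        println!("exclude filter behaves as expected");
    }
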
@@ -656,82 +721,91 @@ pub fn rust_src(build: &Build) {
 //      .only_host_build(true)
 //      .dep(move |s| tool_rust_installer(build, s))
 //      .run(move |_| dist::plain_source_tarball(build));
-/// Creates the plain source tarball
-pub fn plain_source_tarball(build: &Build) {
-    println!("Create plain source tarball");
-
-    // Make sure that the root folder of tarball has the correct name
-    let plain_name = format!("{}-src", pkgname(build, "rustc"));
-    let plain_dst_src = tmpdir(build).join(&plain_name);
-    let _ = fs::remove_dir_all(&plain_dst_src);
-    t!(fs::create_dir_all(&plain_dst_src));
-
-    // This is the set of root paths which will become part of the source package
-    let src_files = [
-        "COPYRIGHT",
-        "LICENSE-APACHE",
-        "LICENSE-MIT",
-        "CONTRIBUTING.md",
-        "README.md",
-        "RELEASES.md",
-        "configure",
-        "x.py",
-    ];
-    let src_dirs = [
-        "man",
-        "src",
-    ];
-
-    copy_src_dirs(build, &src_dirs[..], &[], &plain_dst_src);
 
-    // Copy the files normally
-    for item in &src_files {
-        copy(&build.src.join(item), &plain_dst_src.join(item));
-    }
+#[derive(Serialize)]
+pub struct PlainSourceTarball;
+
+impl<'a> Step<'a> for PlainSourceTarball {
+    type Output = ();
+
+    /// Creates the plain source tarball
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        println!("Create plain source tarball");
+
+        // Make sure that the root folder of tarball has the correct name
+        let plain_name = format!("{}-src", pkgname(build, "rustc"));
+        let plain_dst_src = tmpdir(build).join(&plain_name);
+        let _ = fs::remove_dir_all(&plain_dst_src);
+        t!(fs::create_dir_all(&plain_dst_src));
+
+        // This is the set of root paths which will become part of the source package
+        let src_files = [
+            "COPYRIGHT",
+            "LICENSE-APACHE",
+            "LICENSE-MIT",
+            "CONTRIBUTING.md",
+            "README.md",
+            "RELEASES.md",
+            "configure",
+            "x.py",
+        ];
+        let src_dirs = [
+            "man",
+            "src",
+        ];
 
-    // Create the version file
-    write_file(&plain_dst_src.join("version"), build.rust_version().as_bytes());
+        copy_src_dirs(build, &src_dirs[..], &[], &plain_dst_src);
 
-    // If we're building from git sources, we need to vendor a complete distribution.
-    if build.rust_info.is_git() {
-        // Get cargo-vendor installed, if it isn't already.
-        let mut has_cargo_vendor = false;
-        let mut cmd = Command::new(&build.initial_cargo);
-        for line in output(cmd.arg("install").arg("--list")).lines() {
-            has_cargo_vendor |= line.starts_with("cargo-vendor ");
+        // Copy the files normally
+        for item in &src_files {
+            copy(&build.src.join(item), &plain_dst_src.join(item));
         }
-        if !has_cargo_vendor {
+
+        // Create the version file
+        write_file(&plain_dst_src.join("version"), build.rust_version().as_bytes());
+
+        // If we're building from git sources, we need to vendor a complete distribution.
+        if build.rust_info.is_git() {
+            // Get cargo-vendor installed, if it isn't already.
+            let mut has_cargo_vendor = false;
+            let mut cmd = Command::new(&build.initial_cargo);
+            for line in output(cmd.arg("install").arg("--list")).lines() {
+                has_cargo_vendor |= line.starts_with("cargo-vendor ");
+            }
+            if !has_cargo_vendor {
+                let mut cmd = Command::new(&build.initial_cargo);
+                cmd.arg("install")
+                   .arg("--force")
+                   .arg("--debug")
+                   .arg("--vers").arg(CARGO_VENDOR_VERSION)
+                   .arg("cargo-vendor")
+                   .env("RUSTC", &build.initial_rustc);
+                build.run(&mut cmd);
+            }
+
+            // Vendor all Cargo dependencies
             let mut cmd = Command::new(&build.initial_cargo);
-            cmd.arg("install")
-               .arg("--force")
-               .arg("--debug")
-               .arg("--vers").arg(CARGO_VENDOR_VERSION)
-               .arg("cargo-vendor")
-               .env("RUSTC", &build.initial_rustc);
+            cmd.arg("vendor")
+               .current_dir(&plain_dst_src.join("src"));
             build.run(&mut cmd);
         }
 
-        // Vendor all Cargo dependencies
-        let mut cmd = Command::new(&build.initial_cargo);
-        cmd.arg("vendor")
-           .current_dir(&plain_dst_src.join("src"));
+        // Create plain source tarball
+        let mut tarball = rust_src_location(build);
+        tarball.set_extension(""); // strip .gz
+        tarball.set_extension(""); // strip .tar
+        if let Some(dir) = tarball.parent() {
+            t!(fs::create_dir_all(dir));
+        }
+        let mut cmd = rust_installer(build);
+        cmd.arg("tarball")
+           .arg("--input").arg(&plain_name)
+           .arg("--output").arg(&tarball)
+           .arg("--work-dir=.")
+           .current_dir(tmpdir(build));
         build.run(&mut cmd);
     }
-
-    // Create plain source tarball
-    let mut tarball = rust_src_location(build);
-    tarball.set_extension(""); // strip .gz
-    tarball.set_extension(""); // strip .tar
-    if let Some(dir) = tarball.parent() {
-        t!(fs::create_dir_all(dir));
-    }
-    let mut cmd = rust_installer(build);
-    cmd.arg("tarball")
-       .arg("--input").arg(&plain_name)
-       .arg("--output").arg(&tarball)
-       .arg("--work-dir=.")
-       .current_dir(tmpdir(build));
-    build.run(&mut cmd);
 }
 
 fn install(src: &Path, dstdir: &Path, perms: u32) {
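
One detail of `PlainSourceTarball` above is easy to miss: `PathBuf::set_extension` only replaces the text after the last dot, so the `.tar.gz` suffix must be stripped with two calls before the path is handed to `rust-installer tarball` as `--output`. A stand-alone demonstration, using a made-up file name:

    use std::path::PathBuf;

    fn main() {
        let mut tarball = PathBuf::from("dist/rustc-src.tar.gz");
        tarball.set_extension(""); // strips ".gz", leaving "dist/rustc-src.tar"
        tarball.set_extension(""); // strips ".tar", leaving "dist/rustc-src"
        assert_eq!(tarball, PathBuf::from("dist/rustc-src"));
        println!("{}", tarball.display());
    }
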
@@ -779,64 +853,78 @@ fn write_file(path: &Path, data: &[u8]) {
 //      .dep(|s| s.name("tool-cargo"))
 //      .dep(move |s| tool_rust_installer(build, s))
 //      .run(move |s| dist::cargo(build, s.stage, s.target));
-pub fn cargo(build: &Build, stage: u32, target: &str) {
-    println!("Dist cargo stage{} ({})", stage, target);
-    let compiler = Compiler::new(stage, &build.build);
-
-    let src = build.src.join("src/tools/cargo");
-    let etc = src.join("src/etc");
-    let release_num = build.release_num("cargo");
-    let name = pkgname(build, "cargo");
-    let version = build.cargo_info.version(build, &release_num);
-
-    let tmp = tmpdir(build);
-    let image = tmp.join("cargo-image");
-    drop(fs::remove_dir_all(&image));
-    t!(fs::create_dir_all(&image));
-
-    // Prepare the image directory
-    t!(fs::create_dir_all(image.join("share/zsh/site-functions")));
-    t!(fs::create_dir_all(image.join("etc/bash_completion.d")));
-    let cargo = build.cargo_out(&compiler, Mode::Tool, target)
-                     .join(exe("cargo", target));
-    install(&cargo, &image.join("bin"), 0o755);
-    for man in t!(etc.join("man").read_dir()) {
-        let man = t!(man);
-        install(&man.path(), &image.join("share/man/man1"), 0o644);
+
+#[derive(Serialize)]
+pub struct Cargo<'a> {
+    stage: u32,
+    target: &'a str,
+}
+
+impl<'a> Step<'a> for Cargo<'a> {
+    type Output = ();
+
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let stage = self.stage;
+        let target = self.target;
+        println!("Dist cargo stage{} ({})", stage, target);
+        let compiler = Compiler::new(stage, &build.build);
+
+        let src = build.src.join("src/tools/cargo");
+        let etc = src.join("src/etc");
+        let release_num = build.release_num("cargo");
+        let name = pkgname(build, "cargo");
+        let version = build.cargo_info.version(build, &release_num);
+
+        let tmp = tmpdir(build);
+        let image = tmp.join("cargo-image");
+        drop(fs::remove_dir_all(&image));
+        t!(fs::create_dir_all(&image));
+
+        // Prepare the image directory
+        t!(fs::create_dir_all(image.join("share/zsh/site-functions")));
+        t!(fs::create_dir_all(image.join("etc/bash_completion.d")));
+        let cargo = build.cargo_out(&compiler, Mode::Tool, target)
+                         .join(exe("cargo", target));
+        install(&cargo, &image.join("bin"), 0o755);
+        for man in t!(etc.join("man").read_dir()) {
+            let man = t!(man);
+            install(&man.path(), &image.join("share/man/man1"), 0o644);
+        }
+        install(&etc.join("_cargo"), &image.join("share/zsh/site-functions"), 0o644);
+        copy(&etc.join("cargo.bashcomp.sh"),
+             &image.join("etc/bash_completion.d/cargo"));
+        let doc = image.join("share/doc/cargo");
+        install(&src.join("README.md"), &doc, 0o644);
+        install(&src.join("LICENSE-MIT"), &doc, 0o644);
+        install(&src.join("LICENSE-APACHE"), &doc, 0o644);
+        install(&src.join("LICENSE-THIRD-PARTY"), &doc, 0o644);
+
+        // Prepare the overlay
+        let overlay = tmp.join("cargo-overlay");
+        drop(fs::remove_dir_all(&overlay));
+        t!(fs::create_dir_all(&overlay));
+        install(&src.join("README.md"), &overlay, 0o644);
+        install(&src.join("LICENSE-MIT"), &overlay, 0o644);
+        install(&src.join("LICENSE-APACHE"), &overlay, 0o644);
+        install(&src.join("LICENSE-THIRD-PARTY"), &overlay, 0o644);
+        t!(t!(File::create(overlay.join("version"))).write_all(version.as_bytes()));
+
+        // Generate the installer tarball
+        let mut cmd = rust_installer(build);
+        cmd.arg("generate")
+           .arg("--product-name=Rust")
+           .arg("--rel-manifest-dir=rustlib")
+           .arg("--success-message=Rust-is-ready-to-roll.")
+           .arg("--image-dir").arg(&image)
+           .arg("--work-dir").arg(&tmpdir(build))
+           .arg("--output-dir").arg(&distdir(build))
+           .arg("--non-installed-overlay").arg(&overlay)
+           .arg(format!("--package-name={}-{}", name, target))
+           .arg("--component-name=cargo")
+           .arg("--legacy-manifest-dirs=rustlib,cargo");
+        build.run(&mut cmd);
     }
-    install(&etc.join("_cargo"), &image.join("share/zsh/site-functions"), 0o644);
-    copy(&etc.join("cargo.bashcomp.sh"),
-         &image.join("etc/bash_completion.d/cargo"));
-    let doc = image.join("share/doc/cargo");
-    install(&src.join("README.md"), &doc, 0o644);
-    install(&src.join("LICENSE-MIT"), &doc, 0o644);
-    install(&src.join("LICENSE-APACHE"), &doc, 0o644);
-    install(&src.join("LICENSE-THIRD-PARTY"), &doc, 0o644);
-
-    // Prepare the overlay
-    let overlay = tmp.join("cargo-overlay");
-    drop(fs::remove_dir_all(&overlay));
-    t!(fs::create_dir_all(&overlay));
-    install(&src.join("README.md"), &overlay, 0o644);
-    install(&src.join("LICENSE-MIT"), &overlay, 0o644);
-    install(&src.join("LICENSE-APACHE"), &overlay, 0o644);
-    install(&src.join("LICENSE-THIRD-PARTY"), &overlay, 0o644);
-    t!(t!(File::create(overlay.join("version"))).write_all(version.as_bytes()));
-
-    // Generate the installer tarball
-    let mut cmd = rust_installer(build);
-    cmd.arg("generate")
-       .arg("--product-name=Rust")
-       .arg("--rel-manifest-dir=rustlib")
-       .arg("--success-message=Rust-is-ready-to-roll.")
-       .arg("--image-dir").arg(&image)
-       .arg("--work-dir").arg(&tmpdir(build))
-       .arg("--output-dir").arg(&distdir(build))
-       .arg("--non-installed-overlay").arg(&overlay)
-       .arg(format!("--package-name={}-{}", name, target))
-       .arg("--component-name=cargo")
-       .arg("--legacy-manifest-dirs=rustlib,cargo");
-    build.run(&mut cmd);
 }
 
 // rules.dist("dist-rls", "rls")
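
The `t!(...)` calls wrapped around nearly every filesystem operation in these steps come from a small helper macro defined elsewhere in src/bootstrap: it unwraps a `Result` and panics with the text of the failing expression. The definition below is an approximation written for illustration, not a copy of the real macro.

    // Approximation of the bootstrap `t!` macro: unwrap, or panic with the
    // stringified expression and the error it produced.
    macro_rules! t {
        ($e:expr) => {
            match $e {
                Ok(value) => value,
                Err(err) => panic!("{} failed with {}", stringify!($e), err),
            }
        };
    }

    fn main() {
        // Succeeds quietly, or panics with a message naming the failing call.
        let tmp = std::env::temp_dir().join("bootstrap-t-macro-demo");
        t!(std::fs::create_dir_all(&tmp));
        t!(std::fs::remove_dir_all(&tmp));
        println!("t! demo finished");
    }
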
@@ -845,53 +933,66 @@ pub fn cargo(build: &Build, stage: u32, target: &str) {
 //      .dep(|s| s.name("tool-rls"))
 //      .dep(move |s| tool_rust_installer(build, s))
 //      .run(move |s| dist::rls(build, s.stage, s.target));
-pub fn rls(build: &Build, stage: u32, target: &str) {
-    assert!(build.config.extended);
-    println!("Dist RLS stage{} ({})", stage, target);
-    let compiler = Compiler::new(stage, &build.build);
-
-    let src = build.src.join("src/tools/rls");
-    let release_num = build.release_num("rls");
-    let name = pkgname(build, "rls");
-    let version = build.rls_info.version(build, &release_num);
-
-    let tmp = tmpdir(build);
-    let image = tmp.join("rls-image");
-    drop(fs::remove_dir_all(&image));
-    t!(fs::create_dir_all(&image));
-
-    // Prepare the image directory
-    let rls = build.cargo_out(&compiler, Mode::Tool, target)
-                     .join(exe("rls", target));
-    install(&rls, &image.join("bin"), 0o755);
-    let doc = image.join("share/doc/rls");
-    install(&src.join("README.md"), &doc, 0o644);
-    install(&src.join("LICENSE-MIT"), &doc, 0o644);
-    install(&src.join("LICENSE-APACHE"), &doc, 0o644);
-
-    // Prepare the overlay
-    let overlay = tmp.join("rls-overlay");
-    drop(fs::remove_dir_all(&overlay));
-    t!(fs::create_dir_all(&overlay));
-    install(&src.join("README.md"), &overlay, 0o644);
-    install(&src.join("LICENSE-MIT"), &overlay, 0o644);
-    install(&src.join("LICENSE-APACHE"), &overlay, 0o644);
-    t!(t!(File::create(overlay.join("version"))).write_all(version.as_bytes()));
-
-    // Generate the installer tarball
-    let mut cmd = rust_installer(build);
-    cmd.arg("generate")
-       .arg("--product-name=Rust")
-       .arg("--rel-manifest-dir=rustlib")
-       .arg("--success-message=RLS-ready-to-serve.")
-       .arg("--image-dir").arg(&image)
-       .arg("--work-dir").arg(&tmpdir(build))
-       .arg("--output-dir").arg(&distdir(build))
-       .arg("--non-installed-overlay").arg(&overlay)
-       .arg(format!("--package-name={}-{}", name, target))
-       .arg("--component-name=rls")
-       .arg("--legacy-manifest-dirs=rustlib,cargo");
-    build.run(&mut cmd);
+#[derive(Serialize)]
+pub struct Rls<'a> {
+    stage: u32,
+    target: &'a str,
+}
+
+impl<'a> Step<'a> for Rls<'a> {
+    type Output = ();
+
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let stage = self.stage;
+        let target = self.target;
+        assert!(build.config.extended);
+        println!("Dist RLS stage{} ({})", stage, target);
+        let compiler = Compiler::new(stage, &build.build);
+
+        let src = build.src.join("src/tools/rls");
+        let release_num = build.release_num("rls");
+        let name = pkgname(build, "rls");
+        let version = build.rls_info.version(build, &release_num);
+
+        let tmp = tmpdir(build);
+        let image = tmp.join("rls-image");
+        drop(fs::remove_dir_all(&image));
+        t!(fs::create_dir_all(&image));
+
+        // Prepare the image directory
+        let rls = build.cargo_out(&compiler, Mode::Tool, target)
+                         .join(exe("rls", target));
+        install(&rls, &image.join("bin"), 0o755);
+        let doc = image.join("share/doc/rls");
+        install(&src.join("README.md"), &doc, 0o644);
+        install(&src.join("LICENSE-MIT"), &doc, 0o644);
+        install(&src.join("LICENSE-APACHE"), &doc, 0o644);
+
+        // Prepare the overlay
+        let overlay = tmp.join("rls-overlay");
+        drop(fs::remove_dir_all(&overlay));
+        t!(fs::create_dir_all(&overlay));
+        install(&src.join("README.md"), &overlay, 0o644);
+        install(&src.join("LICENSE-MIT"), &overlay, 0o644);
+        install(&src.join("LICENSE-APACHE"), &overlay, 0o644);
+        t!(t!(File::create(overlay.join("version"))).write_all(version.as_bytes()));
+
+        // Generate the installer tarball
+        let mut cmd = rust_installer(build);
+        cmd.arg("generate")
+           .arg("--product-name=Rust")
+           .arg("--rel-manifest-dir=rustlib")
+           .arg("--success-message=RLS-ready-to-serve.")
+           .arg("--image-dir").arg(&image)
+           .arg("--work-dir").arg(&tmpdir(build))
+           .arg("--output-dir").arg(&distdir(build))
+           .arg("--non-installed-overlay").arg(&overlay)
+           .arg(format!("--package-name={}-{}", name, target))
+           .arg("--component-name=rls")
+           .arg("--legacy-manifest-dirs=rustlib,cargo");
+        build.run(&mut cmd);
+    }
 }
 
 // rules.dist("dist-extended", "extended")
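
Both `Cargo` and `Rls` above end the same way: a `Command` is assembled with chained `.arg()` calls and handed to `build.run(&mut cmd)`, which executes it and treats failure as fatal. The helper below is a stand-alone approximation of that pattern, not the real `Build::run`; the demo assumes `rustc` is on PATH.

    use std::process::Command;

    // Run a command, failing loudly if it cannot be spawned or exits non-zero.
    fn run(cmd: &mut Command) {
        println!("running: {:?}", cmd);
        let status = cmd.status().expect("failed to spawn command");
        assert!(status.success(), "command exited with {}", status);
    }

    fn main() {
        // Assumes `rustc` is available on PATH.
        let mut cmd = Command::new("rustc");
        cmd.arg("--version");
        run(&mut cmd);
    }
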
@@ -908,358 +1009,372 @@ pub fn rls(build: &Build, stage: u32, target: &str) {
 //      .dep(move |s| tool_rust_installer(build, s))
 //      .run(move |s| dist::extended(build, s.stage, s.target));
 
-/// Creates a combined installer for the specified target in the provided stage.
-pub fn extended(build: &Build, stage: u32, target: &str) {
-    println!("Dist extended stage{} ({})", stage, target);
-
-    let dist = distdir(build);
-    let rustc_installer = dist.join(format!("{}-{}.tar.gz",
-                                            pkgname(build, "rustc"),
-                                            target));
-    let cargo_installer = dist.join(format!("{}-{}.tar.gz",
-                                            pkgname(build, "cargo"),
-                                            target));
-    let rls_installer = dist.join(format!("{}-{}.tar.gz",
-                                          pkgname(build, "rls"),
-                                          target));
-    let analysis_installer = dist.join(format!("{}-{}.tar.gz",
-                                               pkgname(build, "rust-analysis"),
-                                               target));
-    let docs_installer = dist.join(format!("{}-{}.tar.gz",
-                                           pkgname(build, "rust-docs"),
-                                           target));
-    let mingw_installer = dist.join(format!("{}-{}.tar.gz",
-                                            pkgname(build, "rust-mingw"),
-                                            target));
-    let std_installer = dist.join(format!("{}-{}.tar.gz",
-                                          pkgname(build, "rust-std"),
-                                          target));
-
-    let tmp = tmpdir(build);
-    let overlay = tmp.join("extended-overlay");
-    let etc = build.src.join("src/etc/installer");
-    let work = tmp.join("work");
-
-    let _ = fs::remove_dir_all(&overlay);
-    install(&build.src.join("COPYRIGHT"), &overlay, 0o644);
-    install(&build.src.join("LICENSE-APACHE"), &overlay, 0o644);
-    install(&build.src.join("LICENSE-MIT"), &overlay, 0o644);
-    let version = build.rust_version();
-    t!(t!(File::create(overlay.join("version"))).write_all(version.as_bytes()));
-    install(&etc.join("README.md"), &overlay, 0o644);
-
-    // When rust-std package split from rustc, we needed to ensure that during
-    // upgrades rustc was upgraded before rust-std. To avoid rustc clobbering
-    // the std files during uninstall. To do this ensure that rustc comes
-    // before rust-std in the list below.
-    let mut tarballs = vec![rustc_installer, cargo_installer, rls_installer,
-                            analysis_installer, docs_installer, std_installer];
-    if target.contains("pc-windows-gnu") {
-        tarballs.push(mingw_installer);
-    }
-    let mut input_tarballs = tarballs[0].as_os_str().to_owned();
-    for tarball in &tarballs[1..] {
-        input_tarballs.push(",");
-        input_tarballs.push(tarball);
-    }
+#[derive(Serialize)]
+pub struct Extended<'a> {
+    stage: u32,
+    target: &'a str,
+}
 
-    let mut cmd = rust_installer(build);
-    cmd.arg("combine")
-       .arg("--product-name=Rust")
-       .arg("--rel-manifest-dir=rustlib")
-       .arg("--success-message=Rust-is-ready-to-roll.")
-       .arg("--work-dir").arg(&work)
-       .arg("--output-dir").arg(&distdir(build))
-       .arg(format!("--package-name={}-{}", pkgname(build, "rust"), target))
-       .arg("--legacy-manifest-dirs=rustlib,cargo")
-       .arg("--input-tarballs").arg(input_tarballs)
-       .arg("--non-installed-overlay").arg(&overlay);
-    build.run(&mut cmd);
+impl<'a> Step<'a> for Extended<'a> {
+    type Output = ();
+
+    /// Creates a combined installer for the specified target in the provided stage.
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let stage = self.stage;
+        let target = self.target;
+
+        println!("Dist extended stage{} ({})", stage, target);
+
+        let dist = distdir(build);
+        let rustc_installer = dist.join(format!("{}-{}.tar.gz",
+                                                pkgname(build, "rustc"),
+                                                target));
+        let cargo_installer = dist.join(format!("{}-{}.tar.gz",
+                                                pkgname(build, "cargo"),
+                                                target));
+        let rls_installer = dist.join(format!("{}-{}.tar.gz",
+                                                pkgname(build, "rls"),
+                                                target));
+        let analysis_installer = dist.join(format!("{}-{}.tar.gz",
+                                                    pkgname(build, "rust-analysis"),
+                                                    target));
+        let docs_installer = dist.join(format!("{}-{}.tar.gz",
+                                                pkgname(build, "rust-docs"),
+                                                target));
+        let mingw_installer = dist.join(format!("{}-{}.tar.gz",
+                                                pkgname(build, "rust-mingw"),
+                                                target));
+        let std_installer = dist.join(format!("{}-{}.tar.gz",
+                                                pkgname(build, "rust-std"),
+                                                target));
+
+        let tmp = tmpdir(build);
+        let overlay = tmp.join("extended-overlay");
+        let etc = build.src.join("src/etc/installer");
+        let work = tmp.join("work");
+
+        let _ = fs::remove_dir_all(&overlay);
+        install(&build.src.join("COPYRIGHT"), &overlay, 0o644);
+        install(&build.src.join("LICENSE-APACHE"), &overlay, 0o644);
+        install(&build.src.join("LICENSE-MIT"), &overlay, 0o644);
+        let version = build.rust_version();
+        t!(t!(File::create(overlay.join("version"))).write_all(version.as_bytes()));
+        install(&etc.join("README.md"), &overlay, 0o644);
+
+        // When rust-std package split from rustc, we needed to ensure that during
+        // upgrades rustc was upgraded before rust-std. To avoid rustc clobbering
+        // the std files during uninstall. To do this ensure that rustc comes
+        // before rust-std in the list below.
+        let mut tarballs = vec![rustc_installer, cargo_installer, rls_installer,
+                                analysis_installer, docs_installer, std_installer];
+        if target.contains("pc-windows-gnu") {
+            tarballs.push(mingw_installer);
+        }
+        let mut input_tarballs = tarballs[0].as_os_str().to_owned();
+        for tarball in &tarballs[1..] {
+            input_tarballs.push(",");
+            input_tarballs.push(tarball);
+        }
 
-    let mut license = String::new();
-    t!(t!(File::open(build.src.join("COPYRIGHT"))).read_to_string(&mut license));
-    license.push_str("\n");
-    t!(t!(File::open(build.src.join("LICENSE-APACHE"))).read_to_string(&mut license));
-    license.push_str("\n");
-    t!(t!(File::open(build.src.join("LICENSE-MIT"))).read_to_string(&mut license));
-
-    let rtf = r"{\rtf1\ansi\deff0{\fonttbl{\f0\fnil\fcharset0 Arial;}}\nowwrap\fs18";
-    let mut rtf = rtf.to_string();
-    rtf.push_str("\n");
-    for line in license.lines() {
-        rtf.push_str(line);
-        rtf.push_str("\\line ");
-    }
-    rtf.push_str("}");
-
-    if target.contains("apple-darwin") {
-        let pkg = tmp.join("pkg");
-        let _ = fs::remove_dir_all(&pkg);
-        t!(fs::create_dir_all(pkg.join("rustc")));
-        t!(fs::create_dir_all(pkg.join("cargo")));
-        t!(fs::create_dir_all(pkg.join("rust-docs")));
-        t!(fs::create_dir_all(pkg.join("rust-std")));
-        t!(fs::create_dir_all(pkg.join("rls")));
-        t!(fs::create_dir_all(pkg.join("rust-analysis")));
-
-        cp_r(&work.join(&format!("{}-{}", pkgname(build, "rustc"), target)),
-             &pkg.join("rustc"));
-        cp_r(&work.join(&format!("{}-{}", pkgname(build, "cargo"), target)),
-             &pkg.join("cargo"));
-        cp_r(&work.join(&format!("{}-{}", pkgname(build, "rust-docs"), target)),
-             &pkg.join("rust-docs"));
-        cp_r(&work.join(&format!("{}-{}", pkgname(build, "rust-std"), target)),
-             &pkg.join("rust-std"));
-        cp_r(&work.join(&format!("{}-{}", pkgname(build, "rls"), target)),
-             &pkg.join("rls"));
-        cp_r(&work.join(&format!("{}-{}", pkgname(build, "rust-analysis"), target)),
-             &pkg.join("rust-analysis"));
-
-        install(&etc.join("pkg/postinstall"), &pkg.join("rustc"), 0o755);
-        install(&etc.join("pkg/postinstall"), &pkg.join("cargo"), 0o755);
-        install(&etc.join("pkg/postinstall"), &pkg.join("rust-docs"), 0o755);
-        install(&etc.join("pkg/postinstall"), &pkg.join("rust-std"), 0o755);
-        install(&etc.join("pkg/postinstall"), &pkg.join("rls"), 0o755);
-        install(&etc.join("pkg/postinstall"), &pkg.join("rust-analysis"), 0o755);
-
-        let pkgbuild = |component: &str| {
-            let mut cmd = Command::new("pkgbuild");
-            cmd.arg("--identifier").arg(format!("org.rust-lang.{}", component))
-               .arg("--scripts").arg(pkg.join(component))
-               .arg("--nopayload")
-               .arg(pkg.join(component).with_extension("pkg"));
-            build.run(&mut cmd);
-        };
-        pkgbuild("rustc");
-        pkgbuild("cargo");
-        pkgbuild("rust-docs");
-        pkgbuild("rust-std");
-        pkgbuild("rls");
-        pkgbuild("rust-analysis");
-
-        // create an 'uninstall' package
-        install(&etc.join("pkg/postinstall"), &pkg.join("uninstall"), 0o755);
-        pkgbuild("uninstall");
-
-        t!(fs::create_dir_all(pkg.join("res")));
-        t!(t!(File::create(pkg.join("res/LICENSE.txt"))).write_all(license.as_bytes()));
-        install(&etc.join("gfx/rust-logo.png"), &pkg.join("res"), 0o644);
-        let mut cmd = Command::new("productbuild");
-        cmd.arg("--distribution").arg(etc.join("pkg/Distribution.xml"))
-           .arg("--resources").arg(pkg.join("res"))
-           .arg(distdir(build).join(format!("{}-{}.pkg",
-                                             pkgname(build, "rust"),
-                                             target)))
-           .arg("--package-path").arg(&pkg);
+        let mut cmd = rust_installer(build);
+        cmd.arg("combine")
+            .arg("--product-name=Rust")
+            .arg("--rel-manifest-dir=rustlib")
+            .arg("--success-message=Rust-is-ready-to-roll.")
+            .arg("--work-dir").arg(&work)
+            .arg("--output-dir").arg(&distdir(build))
+            .arg(format!("--package-name={}-{}", pkgname(build, "rust"), target))
+            .arg("--legacy-manifest-dirs=rustlib,cargo")
+            .arg("--input-tarballs").arg(input_tarballs)
+            .arg("--non-installed-overlay").arg(&overlay);
         build.run(&mut cmd);
-    }
 
-    if target.contains("windows") {
-        let exe = tmp.join("exe");
-        let _ = fs::remove_dir_all(&exe);
-        t!(fs::create_dir_all(exe.join("rustc")));
-        t!(fs::create_dir_all(exe.join("cargo")));
-        t!(fs::create_dir_all(exe.join("rls")));
-        t!(fs::create_dir_all(exe.join("rust-analysis")));
-        t!(fs::create_dir_all(exe.join("rust-docs")));
-        t!(fs::create_dir_all(exe.join("rust-std")));
-        cp_r(&work.join(&format!("{}-{}", pkgname(build, "rustc"), target))
-                  .join("rustc"),
-             &exe.join("rustc"));
-        cp_r(&work.join(&format!("{}-{}", pkgname(build, "cargo"), target))
-                  .join("cargo"),
-             &exe.join("cargo"));
-        cp_r(&work.join(&format!("{}-{}", pkgname(build, "rust-docs"), target))
-                  .join("rust-docs"),
-             &exe.join("rust-docs"));
-        cp_r(&work.join(&format!("{}-{}", pkgname(build, "rust-std"), target))
-                  .join(format!("rust-std-{}", target)),
-             &exe.join("rust-std"));
-        cp_r(&work.join(&format!("{}-{}", pkgname(build, "rls"), target))
-                  .join("rls"),
-             &exe.join("rls"));
-        cp_r(&work.join(&format!("{}-{}", pkgname(build, "rust-analysis"), target))
-                  .join(format!("rust-analysis-{}", target)),
-             &exe.join("rust-analysis"));
-
-        t!(fs::remove_file(exe.join("rustc/manifest.in")));
-        t!(fs::remove_file(exe.join("cargo/manifest.in")));
-        t!(fs::remove_file(exe.join("rust-docs/manifest.in")));
-        t!(fs::remove_file(exe.join("rust-std/manifest.in")));
-        t!(fs::remove_file(exe.join("rls/manifest.in")));
-        t!(fs::remove_file(exe.join("rust-analysis/manifest.in")));
-
-        if target.contains("windows-gnu") {
-            t!(fs::create_dir_all(exe.join("rust-mingw")));
-            cp_r(&work.join(&format!("{}-{}", pkgname(build, "rust-mingw"), target))
-                      .join("rust-mingw"),
-                 &exe.join("rust-mingw"));
-            t!(fs::remove_file(exe.join("rust-mingw/manifest.in")));
+        let mut license = String::new();
+        t!(t!(File::open(build.src.join("COPYRIGHT"))).read_to_string(&mut license));
+        license.push_str("\n");
+        t!(t!(File::open(build.src.join("LICENSE-APACHE"))).read_to_string(&mut license));
+        license.push_str("\n");
+        t!(t!(File::open(build.src.join("LICENSE-MIT"))).read_to_string(&mut license));
+
+        let rtf = r"{\rtf1\ansi\deff0{\fonttbl{\f0\fnil\fcharset0 Arial;}}\nowwrap\fs18";
+        let mut rtf = rtf.to_string();
+        rtf.push_str("\n");
+        for line in license.lines() {
+            rtf.push_str(line);
+            rtf.push_str("\\line ");
         }
-
-        install(&etc.join("exe/rust.iss"), &exe, 0o644);
-        install(&etc.join("exe/modpath.iss"), &exe, 0o644);
-        install(&etc.join("exe/upgrade.iss"), &exe, 0o644);
-        install(&etc.join("gfx/rust-logo.ico"), &exe, 0o644);
-        t!(t!(File::create(exe.join("LICENSE.txt"))).write_all(license.as_bytes()));
-
-        // Generate exe installer
-        let mut cmd = Command::new("iscc");
-        cmd.arg("rust.iss")
-           .current_dir(&exe);
-        if target.contains("windows-gnu") {
-            cmd.arg("/dMINGW");
+        rtf.push_str("}");
+
+        if target.contains("apple-darwin") {
+            let pkg = tmp.join("pkg");
+            let _ = fs::remove_dir_all(&pkg);
+            t!(fs::create_dir_all(pkg.join("rustc")));
+            t!(fs::create_dir_all(pkg.join("cargo")));
+            t!(fs::create_dir_all(pkg.join("rust-docs")));
+            t!(fs::create_dir_all(pkg.join("rust-std")));
+            t!(fs::create_dir_all(pkg.join("rls")));
+            t!(fs::create_dir_all(pkg.join("rust-analysis")));
+
+            cp_r(&work.join(&format!("{}-{}", pkgname(build, "rustc"), target)),
+                    &pkg.join("rustc"));
+            cp_r(&work.join(&format!("{}-{}", pkgname(build, "cargo"), target)),
+                    &pkg.join("cargo"));
+            cp_r(&work.join(&format!("{}-{}", pkgname(build, "rust-docs"), target)),
+                    &pkg.join("rust-docs"));
+            cp_r(&work.join(&format!("{}-{}", pkgname(build, "rust-std"), target)),
+                    &pkg.join("rust-std"));
+            cp_r(&work.join(&format!("{}-{}", pkgname(build, "rls"), target)),
+                    &pkg.join("rls"));
+            cp_r(&work.join(&format!("{}-{}", pkgname(build, "rust-analysis"), target)),
+                    &pkg.join("rust-analysis"));
+
+            install(&etc.join("pkg/postinstall"), &pkg.join("rustc"), 0o755);
+            install(&etc.join("pkg/postinstall"), &pkg.join("cargo"), 0o755);
+            install(&etc.join("pkg/postinstall"), &pkg.join("rust-docs"), 0o755);
+            install(&etc.join("pkg/postinstall"), &pkg.join("rust-std"), 0o755);
+            install(&etc.join("pkg/postinstall"), &pkg.join("rls"), 0o755);
+            install(&etc.join("pkg/postinstall"), &pkg.join("rust-analysis"), 0o755);
+
+            let pkgbuild = |component: &str| {
+                let mut cmd = Command::new("pkgbuild");
+                cmd.arg("--identifier").arg(format!("org.rust-lang.{}", component))
+                    .arg("--scripts").arg(pkg.join(component))
+                    .arg("--nopayload")
+                    .arg(pkg.join(component).with_extension("pkg"));
+                build.run(&mut cmd);
+            };
+            pkgbuild("rustc");
+            pkgbuild("cargo");
+            pkgbuild("rust-docs");
+            pkgbuild("rust-std");
+            pkgbuild("rls");
+            pkgbuild("rust-analysis");
+
+            // create an 'uninstall' package
+            install(&etc.join("pkg/postinstall"), &pkg.join("uninstall"), 0o755);
+            pkgbuild("uninstall");
+
+            t!(fs::create_dir_all(pkg.join("res")));
+            t!(t!(File::create(pkg.join("res/LICENSE.txt"))).write_all(license.as_bytes()));
+            install(&etc.join("gfx/rust-logo.png"), &pkg.join("res"), 0o644);
+            let mut cmd = Command::new("productbuild");
+            cmd.arg("--distribution").arg(etc.join("pkg/Distribution.xml"))
+                .arg("--resources").arg(pkg.join("res"))
+                .arg(distdir(build).join(format!("{}-{}.pkg",
+                                                    pkgname(build, "rust"),
+                                                    target)))
+                .arg("--package-path").arg(&pkg);
+            build.run(&mut cmd);
         }
-        add_env(build, &mut cmd, target);
-        build.run(&mut cmd);
-        install(&exe.join(format!("{}-{}.exe", pkgname(build, "rust"), target)),
-                &distdir(build),
-                0o755);
 
-        // Generate msi installer
-        let wix = PathBuf::from(env::var_os("WIX").unwrap());
-        let heat = wix.join("bin/heat.exe");
-        let candle = wix.join("bin/candle.exe");
-        let light = wix.join("bin/light.exe");
-
-        let heat_flags = ["-nologo", "-gg", "-sfrag", "-srd", "-sreg"];
-        build.run(Command::new(&heat)
-                        .current_dir(&exe)
-                        .arg("dir")
-                        .arg("rustc")
-                        .args(&heat_flags)
-                        .arg("-cg").arg("RustcGroup")
-                        .arg("-dr").arg("Rustc")
-                        .arg("-var").arg("var.RustcDir")
-                        .arg("-out").arg(exe.join("RustcGroup.wxs")));
-        build.run(Command::new(&heat)
-                        .current_dir(&exe)
-                        .arg("dir")
-                        .arg("rust-docs")
-                        .args(&heat_flags)
-                        .arg("-cg").arg("DocsGroup")
-                        .arg("-dr").arg("Docs")
-                        .arg("-var").arg("var.DocsDir")
-                        .arg("-out").arg(exe.join("DocsGroup.wxs"))
-                        .arg("-t").arg(etc.join("msi/squash-components.xsl")));
-        build.run(Command::new(&heat)
-                        .current_dir(&exe)
-                        .arg("dir")
-                        .arg("cargo")
-                        .args(&heat_flags)
-                        .arg("-cg").arg("CargoGroup")
-                        .arg("-dr").arg("Cargo")
-                        .arg("-var").arg("var.CargoDir")
-                        .arg("-out").arg(exe.join("CargoGroup.wxs"))
-                        .arg("-t").arg(etc.join("msi/remove-duplicates.xsl")));
-        build.run(Command::new(&heat)
-                        .current_dir(&exe)
-                        .arg("dir")
-                        .arg("rust-std")
-                        .args(&heat_flags)
-                        .arg("-cg").arg("StdGroup")
-                        .arg("-dr").arg("Std")
-                        .arg("-var").arg("var.StdDir")
-                        .arg("-out").arg(exe.join("StdGroup.wxs")));
-        build.run(Command::new(&heat)
-                        .current_dir(&exe)
-                        .arg("dir")
-                        .arg("rls")
-                        .args(&heat_flags)
-                        .arg("-cg").arg("RlsGroup")
-                        .arg("-dr").arg("Rls")
-                        .arg("-var").arg("var.RlsDir")
-                        .arg("-out").arg(exe.join("RlsGroup.wxs"))
-                        .arg("-t").arg(etc.join("msi/remove-duplicates.xsl")));
-        build.run(Command::new(&heat)
-                        .current_dir(&exe)
-                        .arg("dir")
-                        .arg("rust-analysis")
-                        .args(&heat_flags)
-                        .arg("-cg").arg("AnalysisGroup")
-                        .arg("-dr").arg("Analysis")
-                        .arg("-var").arg("var.AnalysisDir")
-                        .arg("-out").arg(exe.join("AnalysisGroup.wxs"))
-                        .arg("-t").arg(etc.join("msi/remove-duplicates.xsl")));
-        if target.contains("windows-gnu") {
+        if target.contains("windows") {
+            let exe = tmp.join("exe");
+            let _ = fs::remove_dir_all(&exe);
+            t!(fs::create_dir_all(exe.join("rustc")));
+            t!(fs::create_dir_all(exe.join("cargo")));
+            t!(fs::create_dir_all(exe.join("rls")));
+            t!(fs::create_dir_all(exe.join("rust-analysis")));
+            t!(fs::create_dir_all(exe.join("rust-docs")));
+            t!(fs::create_dir_all(exe.join("rust-std")));
+            cp_r(&work.join(&format!("{}-{}", pkgname(build, "rustc"), target))
+                        .join("rustc"),
+                    &exe.join("rustc"));
+            cp_r(&work.join(&format!("{}-{}", pkgname(build, "cargo"), target))
+                        .join("cargo"),
+                    &exe.join("cargo"));
+            cp_r(&work.join(&format!("{}-{}", pkgname(build, "rust-docs"), target))
+                        .join("rust-docs"),
+                    &exe.join("rust-docs"));
+            cp_r(&work.join(&format!("{}-{}", pkgname(build, "rust-std"), target))
+                        .join(format!("rust-std-{}", target)),
+                    &exe.join("rust-std"));
+            cp_r(&work.join(&format!("{}-{}", pkgname(build, "rls"), target))
+                        .join("rls"),
+                    &exe.join("rls"));
+            cp_r(&work.join(&format!("{}-{}", pkgname(build, "rust-analysis"), target))
+                        .join(format!("rust-analysis-{}", target)),
+                    &exe.join("rust-analysis"));
+
+            t!(fs::remove_file(exe.join("rustc/manifest.in")));
+            t!(fs::remove_file(exe.join("cargo/manifest.in")));
+            t!(fs::remove_file(exe.join("rust-docs/manifest.in")));
+            t!(fs::remove_file(exe.join("rust-std/manifest.in")));
+            t!(fs::remove_file(exe.join("rls/manifest.in")));
+            t!(fs::remove_file(exe.join("rust-analysis/manifest.in")));
+
+            if target.contains("windows-gnu") {
+                t!(fs::create_dir_all(exe.join("rust-mingw")));
+                cp_r(&work.join(&format!("{}-{}", pkgname(build, "rust-mingw"), target))
+                            .join("rust-mingw"),
+                        &exe.join("rust-mingw"));
+                t!(fs::remove_file(exe.join("rust-mingw/manifest.in")));
+            }
+
+            install(&etc.join("exe/rust.iss"), &exe, 0o644);
+            install(&etc.join("exe/modpath.iss"), &exe, 0o644);
+            install(&etc.join("exe/upgrade.iss"), &exe, 0o644);
+            install(&etc.join("gfx/rust-logo.ico"), &exe, 0o644);
+            t!(t!(File::create(exe.join("LICENSE.txt"))).write_all(license.as_bytes()));
+
+            // Generate exe installer
+            let mut cmd = Command::new("iscc");
+            cmd.arg("rust.iss")
+                .current_dir(&exe);
+            if target.contains("windows-gnu") {
+                cmd.arg("/dMINGW");
+            }
+            add_env(build, &mut cmd, target);
+            build.run(&mut cmd);
+            install(&exe.join(format!("{}-{}.exe", pkgname(build, "rust"), target)),
+                    &distdir(build),
+                    0o755);
+
+            // Generate msi installer
+            let wix = PathBuf::from(env::var_os("WIX").unwrap());
+            let heat = wix.join("bin/heat.exe");
+            let candle = wix.join("bin/candle.exe");
+            let light = wix.join("bin/light.exe");
+
+            let heat_flags = ["-nologo", "-gg", "-sfrag", "-srd", "-sreg"];
             build.run(Command::new(&heat)
                             .current_dir(&exe)
                             .arg("dir")
-                            .arg("rust-mingw")
+                            .arg("rustc")
                             .args(&heat_flags)
-                            .arg("-cg").arg("GccGroup")
-                            .arg("-dr").arg("Gcc")
-                            .arg("-var").arg("var.GccDir")
-                            .arg("-out").arg(exe.join("GccGroup.wxs")));
-        }
+                            .arg("-cg").arg("RustcGroup")
+                            .arg("-dr").arg("Rustc")
+                            .arg("-var").arg("var.RustcDir")
+                            .arg("-out").arg(exe.join("RustcGroup.wxs")));
+            build.run(Command::new(&heat)
+                            .current_dir(&exe)
+                            .arg("dir")
+                            .arg("rust-docs")
+                            .args(&heat_flags)
+                            .arg("-cg").arg("DocsGroup")
+                            .arg("-dr").arg("Docs")
+                            .arg("-var").arg("var.DocsDir")
+                            .arg("-out").arg(exe.join("DocsGroup.wxs"))
+                            .arg("-t").arg(etc.join("msi/squash-components.xsl")));
+            build.run(Command::new(&heat)
+                            .current_dir(&exe)
+                            .arg("dir")
+                            .arg("cargo")
+                            .args(&heat_flags)
+                            .arg("-cg").arg("CargoGroup")
+                            .arg("-dr").arg("Cargo")
+                            .arg("-var").arg("var.CargoDir")
+                            .arg("-out").arg(exe.join("CargoGroup.wxs"))
+                            .arg("-t").arg(etc.join("msi/remove-duplicates.xsl")));
+            build.run(Command::new(&heat)
+                            .current_dir(&exe)
+                            .arg("dir")
+                            .arg("rust-std")
+                            .args(&heat_flags)
+                            .arg("-cg").arg("StdGroup")
+                            .arg("-dr").arg("Std")
+                            .arg("-var").arg("var.StdDir")
+                            .arg("-out").arg(exe.join("StdGroup.wxs")));
+            build.run(Command::new(&heat)
+                            .current_dir(&exe)
+                            .arg("dir")
+                            .arg("rls")
+                            .args(&heat_flags)
+                            .arg("-cg").arg("RlsGroup")
+                            .arg("-dr").arg("Rls")
+                            .arg("-var").arg("var.RlsDir")
+                            .arg("-out").arg(exe.join("RlsGroup.wxs"))
+                            .arg("-t").arg(etc.join("msi/remove-duplicates.xsl")));
+            build.run(Command::new(&heat)
+                            .current_dir(&exe)
+                            .arg("dir")
+                            .arg("rust-analysis")
+                            .args(&heat_flags)
+                            .arg("-cg").arg("AnalysisGroup")
+                            .arg("-dr").arg("Analysis")
+                            .arg("-var").arg("var.AnalysisDir")
+                            .arg("-out").arg(exe.join("AnalysisGroup.wxs"))
+                            .arg("-t").arg(etc.join("msi/remove-duplicates.xsl")));
+            if target.contains("windows-gnu") {
+                build.run(Command::new(&heat)
+                                .current_dir(&exe)
+                                .arg("dir")
+                                .arg("rust-mingw")
+                                .args(&heat_flags)
+                                .arg("-cg").arg("GccGroup")
+                                .arg("-dr").arg("Gcc")
+                                .arg("-var").arg("var.GccDir")
+                                .arg("-out").arg(exe.join("GccGroup.wxs")));
+            }
 
-        let candle = |input: &Path| {
-            let output = exe.join(input.file_stem().unwrap())
-                            .with_extension("wixobj");
-            let arch = if target.contains("x86_64") {"x64"} else {"x86"};
-            let mut cmd = Command::new(&candle);
-            cmd.current_dir(&exe)
-               .arg("-nologo")
-               .arg("-dRustcDir=rustc")
-               .arg("-dDocsDir=rust-docs")
-               .arg("-dCargoDir=cargo")
-               .arg("-dStdDir=rust-std")
-               .arg("-dRlsDir=rls")
-               .arg("-dAnalysisDir=rust-analysis")
-               .arg("-arch").arg(&arch)
-               .arg("-out").arg(&output)
-               .arg(&input);
-            add_env(build, &mut cmd, target);
+            let candle = |input: &Path| {
+                let output = exe.join(input.file_stem().unwrap())
+                                .with_extension("wixobj");
+                let arch = if target.contains("x86_64") {"x64"} else {"x86"};
+                let mut cmd = Command::new(&candle);
+                cmd.current_dir(&exe)
+                    .arg("-nologo")
+                    .arg("-dRustcDir=rustc")
+                    .arg("-dDocsDir=rust-docs")
+                    .arg("-dCargoDir=cargo")
+                    .arg("-dStdDir=rust-std")
+                    .arg("-dRlsDir=rls")
+                    .arg("-dAnalysisDir=rust-analysis")
+                    .arg("-arch").arg(&arch)
+                    .arg("-out").arg(&output)
+                    .arg(&input);
+                add_env(build, &mut cmd, target);
+
+                if target.contains("windows-gnu") {
+                    cmd.arg("-dGccDir=rust-mingw");
+                }
+                build.run(&mut cmd);
+            };
+            candle(&etc.join("msi/rust.wxs"));
+            candle(&etc.join("msi/ui.wxs"));
+            candle(&etc.join("msi/rustwelcomedlg.wxs"));
+            candle("RustcGroup.wxs".as_ref());
+            candle("DocsGroup.wxs".as_ref());
+            candle("CargoGroup.wxs".as_ref());
+            candle("StdGroup.wxs".as_ref());
+            candle("RlsGroup.wxs".as_ref());
+            candle("AnalysisGroup.wxs".as_ref());
 
             if target.contains("windows-gnu") {
-               cmd.arg("-dGccDir=rust-mingw");
+                candle("GccGroup.wxs".as_ref());
             }
-            build.run(&mut cmd);
-        };
-        candle(&etc.join("msi/rust.wxs"));
-        candle(&etc.join("msi/ui.wxs"));
-        candle(&etc.join("msi/rustwelcomedlg.wxs"));
-        candle("RustcGroup.wxs".as_ref());
-        candle("DocsGroup.wxs".as_ref());
-        candle("CargoGroup.wxs".as_ref());
-        candle("StdGroup.wxs".as_ref());
-        candle("RlsGroup.wxs".as_ref());
-        candle("AnalysisGroup.wxs".as_ref());
-
-        if target.contains("windows-gnu") {
-            candle("GccGroup.wxs".as_ref());
-        }
 
-        t!(t!(File::create(exe.join("LICENSE.rtf"))).write_all(rtf.as_bytes()));
-        install(&etc.join("gfx/banner.bmp"), &exe, 0o644);
-        install(&etc.join("gfx/dialogbg.bmp"), &exe, 0o644);
-
-        let filename = format!("{}-{}.msi", pkgname(build, "rust"), target);
-        let mut cmd = Command::new(&light);
-        cmd.arg("-nologo")
-           .arg("-ext").arg("WixUIExtension")
-           .arg("-ext").arg("WixUtilExtension")
-           .arg("-out").arg(exe.join(&filename))
-           .arg("rust.wixobj")
-           .arg("ui.wixobj")
-           .arg("rustwelcomedlg.wixobj")
-           .arg("RustcGroup.wixobj")
-           .arg("DocsGroup.wixobj")
-           .arg("CargoGroup.wixobj")
-           .arg("StdGroup.wixobj")
-           .arg("RlsGroup.wixobj")
-           .arg("AnalysisGroup.wixobj")
-           .current_dir(&exe);
-
-        if target.contains("windows-gnu") {
-           cmd.arg("GccGroup.wixobj");
-        }
-        // ICE57 wrongly complains about the shortcuts
-        cmd.arg("-sice:ICE57");
+            t!(t!(File::create(exe.join("LICENSE.rtf"))).write_all(rtf.as_bytes()));
+            install(&etc.join("gfx/banner.bmp"), &exe, 0o644);
+            install(&etc.join("gfx/dialogbg.bmp"), &exe, 0o644);
+
+            let filename = format!("{}-{}.msi", pkgname(build, "rust"), target);
+            let mut cmd = Command::new(&light);
+            cmd.arg("-nologo")
+                .arg("-ext").arg("WixUIExtension")
+                .arg("-ext").arg("WixUtilExtension")
+                .arg("-out").arg(exe.join(&filename))
+                .arg("rust.wixobj")
+                .arg("ui.wixobj")
+                .arg("rustwelcomedlg.wixobj")
+                .arg("RustcGroup.wixobj")
+                .arg("DocsGroup.wixobj")
+                .arg("CargoGroup.wixobj")
+                .arg("StdGroup.wixobj")
+                .arg("RlsGroup.wixobj")
+                .arg("AnalysisGroup.wixobj")
+                .current_dir(&exe);
 
 
-        build.run(&mut cmd);
+            if target.contains("windows-gnu") {
+                cmd.arg("GccGroup.wixobj");
+            }
+            // ICE57 wrongly complains about the shortcuts
+            cmd.arg("-sice:ICE57");
+
+            build.run(&mut cmd);
 
 
-        t!(fs::rename(exe.join(&filename), distdir(build).join(&filename)));
+            t!(fs::rename(exe.join(&filename), distdir(build).join(&filename)));
+        }
     }
 }
 
     }
 }
 
@@ -1299,36 +1414,44 @@ fn add_env(build: &Build, cmd: &mut Command, target: &str) {
 //      .only_host_build(true)
 //      .dep(move |s| s.name("tool-build-manifest").target(&build.build).stage(0))
 //      .run(move |_| dist::hash_and_sign(build));
-//
-pub fn hash_and_sign(build: &Build) {
-    let compiler = Compiler::new(0, &build.build);
-    let mut cmd = build.tool_cmd(&compiler, "build-manifest");
-    let sign = build.config.dist_sign_folder.as_ref().unwrap_or_else(|| {
-        panic!("\n\nfailed to specify `dist.sign-folder` in `config.toml`\n\n")
-    });
-    let addr = build.config.dist_upload_addr.as_ref().unwrap_or_else(|| {
-        panic!("\n\nfailed to specify `dist.upload-addr` in `config.toml`\n\n")
-    });
-    let file = build.config.dist_gpg_password_file.as_ref().unwrap_or_else(|| {
-        panic!("\n\nfailed to specify `dist.gpg-password-file` in `config.toml`\n\n")
-    });
-    let mut pass = String::new();
-    t!(t!(File::open(&file)).read_to_string(&mut pass));
-
-    let today = output(Command::new("date").arg("+%Y-%m-%d"));
-
-    cmd.arg(sign);
-    cmd.arg(distdir(build));
-    cmd.arg(today.trim());
-    cmd.arg(build.rust_package_vers());
-    cmd.arg(build.package_vers(&build.release_num("cargo")));
-    cmd.arg(build.package_vers(&build.release_num("rls")));
-    cmd.arg(addr);
-
-    t!(fs::create_dir_all(distdir(build)));
-
-    let mut child = t!(cmd.stdin(Stdio::piped()).spawn());
-    t!(child.stdin.take().unwrap().write_all(pass.as_bytes()));
-    let status = t!(child.wait());
-    assert!(status.success());
+
+#[derive(Serialize)]
+pub struct HashSign;
+
+impl<'a> Step<'a> for HashSign {
+    type Output = ();
+
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let compiler = Compiler::new(0, &build.build);
+        let mut cmd = build.tool_cmd(&compiler, "build-manifest");
+        let sign = build.config.dist_sign_folder.as_ref().unwrap_or_else(|| {
+            panic!("\n\nfailed to specify `dist.sign-folder` in `config.toml`\n\n")
+        });
+        let addr = build.config.dist_upload_addr.as_ref().unwrap_or_else(|| {
+            panic!("\n\nfailed to specify `dist.upload-addr` in `config.toml`\n\n")
+        });
+        let file = build.config.dist_gpg_password_file.as_ref().unwrap_or_else(|| {
+            panic!("\n\nfailed to specify `dist.gpg-password-file` in `config.toml`\n\n")
+        });
+        let mut pass = String::new();
+        t!(t!(File::open(&file)).read_to_string(&mut pass));
+
+        let today = output(Command::new("date").arg("+%Y-%m-%d"));
+
+        cmd.arg(sign);
+        cmd.arg(distdir(build));
+        cmd.arg(today.trim());
+        cmd.arg(build.rust_package_vers());
+        cmd.arg(build.package_vers(&build.release_num("cargo")));
+        cmd.arg(build.package_vers(&build.release_num("rls")));
+        cmd.arg(addr);
+
+        t!(fs::create_dir_all(distdir(build)));
+
+        let mut child = t!(cmd.stdin(Stdio::piped()).spawn());
+        t!(child.stdin.take().unwrap().write_all(pass.as_bytes()));
+        let status = t!(child.wait());
+        assert!(status.success());
+    }
 }
 }
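
The shape of the conversion is identical for every function in this diff: the old free function's parameters become fields on a `#[derive(Serialize)]` struct, the body moves into `Step::run`, and the first lines of `run` recover the former locals from `self` and `builder.build`. Below is a minimal, self-contained sketch of that pattern; the `Build`, `Builder`, and `Step` definitions are simplified stand-ins for the real rustbuild types (which carry far more state and derive serde's `Serialize`), and `ExampleStep` is purely illustrative.

    // Simplified stand-ins for the bootstrap types referenced throughout
    // this diff; the real `Build`, `Builder`, and `Step` live in
    // src/bootstrap and carry much more state (stage, kind, caching, ...).
    struct Build {
        build: String, // build triple
    }

    struct Builder<'a> {
        build: &'a Build,
    }

    trait Step<'a> {
        type Output;
        fn run(self, builder: &Builder) -> Self::Output;
    }

    // Hypothetical step mirroring the conversions above: the former
    // function arguments `stage` and `target` become struct fields.
    struct ExampleStep<'a> {
        stage: u32,
        target: &'a str,
    }

    impl<'a> Step<'a> for ExampleStep<'a> {
        type Output = ();

        fn run(self, builder: &Builder) {
            // Every ported body starts by recovering the old locals.
            let build = builder.build;
            let stage = self.stage;
            let target = self.target;
            println!("stage{} step for {} (host {})", stage, target, build.build);
        }
    }

    fn main() {
        let build = Build { build: "x86_64-unknown-linux-gnu".to_string() };
        let builder = Builder { build: &build };
        ExampleStep { stage: 0, target: "x86_64-unknown-linux-gnu" }.run(&builder);
    }
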
index 432fdb6a3cb8c682de94af2a92c75cb109ced0c1..466d63a15acbdd6ce23d2970aa114ccf0f58cd4b 100644 (file)
 //      })
 //      .default(build.config.docs)
 //      .run(move |s| doc::rustbook(build, s.target, "reference"));
-/// Invoke `rustbook` for `target` for the doc book `name`.
-///
-/// This will not actually generate any documentation if the documentation has
-/// already been generated.
-pub fn rustbook(build: &Build, target: &str, name: &str) {
-    let src = build.src.join("src/doc");
-    rustbook_src(build, target, name, &src);
+
+#[derive(Serialize)]
+pub struct Rustbook<'a> {
+    target: &'a str,
+    name: &'a str,
+}
+
+impl<'a> Step<'a> for Rustbook<'a> {
+    type Output = ();
+
+    /// Invoke `rustbook` for `target` for the doc book `name`.
+    ///
+    /// This will not actually generate any documentation if the documentation has
+    /// already been generated.
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let target = self.target;
+        let name = self.name;
+        let src = build.src.join("src/doc");
+        rustbook_src(build, target, name, &src);
+    }
 }
 
 //rules.doc("doc-unstable-book", "src/doc/unstable-book")
@@ -68,30 +82,44 @@ pub fn rustbook(build: &Build, target: &str, name: &str) {
 //                                     "unstable-book",
 //                                     &build.md_doc_out(s.target)));
 
+#[derive(Serialize)]
+pub struct RustbookSrc<'a> {
+    target: &'a str,
+    name: &'a str,
+    src: &'a Path,
+}
 
 
-/// Invoke `rustbook` for `target` for the doc book `name` from the `src` path.
-///
-/// This will not actually generate any documentation if the documentation has
-/// already been generated.
-pub fn rustbook_src(build: &Build, target: &str, name: &str, src: &Path) {
-    let out = build.doc_out(target);
-    t!(fs::create_dir_all(&out));
-
-    let out = out.join(name);
-    let compiler = Compiler::new(0, &build.build);
-    let src = src.join(name);
-    let index = out.join("index.html");
-    let rustbook = build.tool(&compiler, "rustbook");
-    if up_to_date(&src, &index) && up_to_date(&rustbook, &index) {
-        return
+impl<'a> Step<'a> for RustbookSrc<'a> {
+    type Output = ();
+
+    /// Invoke `rustbook` for `target` for the doc book `name` from the `src` path.
+    ///
+    /// This will not actually generate any documentation if the documentation has
+    /// already been generated.
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let target = self.target;
+        let name = self.name;
+        let src = self.src;
+        let out = build.doc_out(target);
+        t!(fs::create_dir_all(&out));
+
+        let out = out.join(name);
+        let compiler = Compiler::new(0, &build.build);
+        let src = src.join(name);
+        let index = out.join("index.html");
+        let rustbook = build.tool(&compiler, "rustbook");
+        if up_to_date(&src, &index) && up_to_date(&rustbook, &index) {
+            return
+        }
+        println!("Rustbook ({}) - {}", target, name);
+        let _ = fs::remove_dir_all(&out);
+        build.run(build.tool_cmd(&compiler, "rustbook")
+                       .arg("build")
+                       .arg(&src)
+                       .arg("-d")
+                       .arg(out));
     }
     }
-    println!("Rustbook ({}) - {}", target, name);
-    let _ = fs::remove_dir_all(&out);
-    build.run(build.tool_cmd(&compiler, "rustbook")
-                   .arg("build")
-                   .arg(&src)
-                   .arg("-d")
-                   .arg(out));
 }
 
 // rules.doc("doc-book", "src/doc/book")
@@ -103,34 +131,48 @@ pub fn rustbook_src(build: &Build, target: &str, name: &str, src: &Path) {
 //      })
 //      .default(build.config.docs)
 //      .run(move |s| doc::book(build, s.target, "book"));
-/// Build the book and associated stuff.
-///
-/// We need to build:
-///
-/// * Book (first edition)
-/// * Book (second edition)
-/// * Index page
-/// * Redirect pages
-pub fn book(build: &Build, target: &str, name: &str) {
-    // build book first edition
-    rustbook(build, target, &format!("{}/first-edition", name));
-
-    // build book second edition
-    rustbook(build, target, &format!("{}/second-edition", name));
-
-    // build the index page
-    let index = format!("{}/index.md", name);
-    println!("Documenting book index ({})", target);
-    invoke_rustdoc(build, target, &index);
-
-    // build the redirect pages
-    println!("Documenting book redirect pages ({})", target);
-    for file in t!(fs::read_dir(build.src.join("src/doc/book/redirects"))) {
-        let file = t!(file);
-        let path = file.path();
-        let path = path.to_str().unwrap();
-
-        invoke_rustdoc(build, target, path);
+
+#[derive(Serialize)]
+pub struct TheBook<'a> {
+    target: &'a str,
+    name: &'a str,
+}
+
+impl<'a> Step<'a> for TheBook<'a> {
+    type Output = ();
+
+    /// Build the book and associated stuff.
+    ///
+    /// We need to build:
+    ///
+    /// * Book (first edition)
+    /// * Book (second edition)
+    /// * Index page
+    /// * Redirect pages
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let target = self.target;
+        let name = self.name;
+        // build book first edition
+        rustbook(build, target, &format!("{}/first-edition", name));
+
+        // build book second edition
+        rustbook(build, target, &format!("{}/second-edition", name));
+
+        // build the index page
+        let index = format!("{}/index.md", name);
+        println!("Documenting book index ({})", target);
+        invoke_rustdoc(build, target, &index);
+
+        // build the redirect pages
+        println!("Documenting book redirect pages ({})", target);
+        for file in t!(fs::read_dir(build.src.join("src/doc/book/redirects"))) {
+            let file = t!(file);
+            let path = file.path();
+            let path = path.to_str().unwrap();
+
+            invoke_rustdoc(build, target, path);
+        }
     }
 }
 
@@ -188,75 +230,87 @@ fn invoke_rustdoc(build: &Build, target: &str, markdown: &str) {
 //      })
 //      .default(build.config.docs)
 //      .run(move |s| doc::standalone(build, s.target));
-/// Generates all standalone documentation as compiled by the rustdoc in `stage`
-/// for the `target` into `out`.
-///
-/// This will list all of `src/doc` looking for markdown files and appropriately
-/// perform transformations like substituting `VERSION`, `SHORT_HASH`, and
-/// `STAMP` along with providing the various header/footer HTML we've customized.
-///
-/// In the end, this is just a glorified wrapper around rustdoc!
-pub fn standalone(build: &Build, target: &str) {
-    println!("Documenting standalone ({})", target);
-    let out = build.doc_out(target);
-    t!(fs::create_dir_all(&out));
-
-    let compiler = Compiler::new(0, &build.build);
-
-    let favicon = build.src.join("src/doc/favicon.inc");
-    let footer = build.src.join("src/doc/footer.inc");
-    let full_toc = build.src.join("src/doc/full-toc.inc");
-    t!(fs::copy(build.src.join("src/doc/rust.css"), out.join("rust.css")));
 
 
-    let version_input = build.src.join("src/doc/version_info.html.template");
-    let version_info = out.join("version_info.html");
-
-    if !up_to_date(&version_input, &version_info) {
-        let mut info = String::new();
-        t!(t!(File::open(&version_input)).read_to_string(&mut info));
-        let info = info.replace("VERSION", &build.rust_release())
-                       .replace("SHORT_HASH", build.rust_info.sha_short().unwrap_or(""))
-                       .replace("STAMP", build.rust_info.sha().unwrap_or(""));
-        t!(t!(File::create(&version_info)).write_all(info.as_bytes()));
-    }
+#[derive(Serialize)]
+pub struct Standalone<'a> {
+    target: &'a str,
+}
 
 
-    for file in t!(fs::read_dir(build.src.join("src/doc"))) {
-        let file = t!(file);
-        let path = file.path();
-        let filename = path.file_name().unwrap().to_str().unwrap();
-        if !filename.ends_with(".md") || filename == "README.md" {
-            continue
+impl<'a> Step<'a> for Standalone<'a> {
+    type Output = ();
+
+    /// Generates all standalone documentation as compiled by the rustdoc in `stage`
+    /// for the `target` into `out`.
+    ///
+    /// This will list all of `src/doc` looking for markdown files and appropriately
+    /// perform transformations like substituting `VERSION`, `SHORT_HASH`, and
+    /// `STAMP` along with providing the various header/footer HTML we've customized.
+    ///
+    /// In the end, this is just a glorified wrapper around rustdoc!
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let target = self.target;
+        println!("Documenting standalone ({})", target);
+        let out = build.doc_out(target);
+        t!(fs::create_dir_all(&out));
+
+        let compiler = Compiler::new(0, &build.build);
+
+        let favicon = build.src.join("src/doc/favicon.inc");
+        let footer = build.src.join("src/doc/footer.inc");
+        let full_toc = build.src.join("src/doc/full-toc.inc");
+        t!(fs::copy(build.src.join("src/doc/rust.css"), out.join("rust.css")));
+
+        let version_input = build.src.join("src/doc/version_info.html.template");
+        let version_info = out.join("version_info.html");
+
+        if !up_to_date(&version_input, &version_info) {
+            let mut info = String::new();
+            t!(t!(File::open(&version_input)).read_to_string(&mut info));
+            let info = info.replace("VERSION", &build.rust_release())
+                           .replace("SHORT_HASH", build.rust_info.sha_short().unwrap_or(""))
+                           .replace("STAMP", build.rust_info.sha().unwrap_or(""));
+            t!(t!(File::create(&version_info)).write_all(info.as_bytes()));
         }
 
         }
 
-        let html = out.join(filename).with_extension("html");
-        let rustdoc = build.rustdoc(&compiler);
-        if up_to_date(&path, &html) &&
-           up_to_date(&footer, &html) &&
-           up_to_date(&favicon, &html) &&
-           up_to_date(&full_toc, &html) &&
-           up_to_date(&version_info, &html) &&
-           up_to_date(&rustdoc, &html) {
-            continue
+        for file in t!(fs::read_dir(build.src.join("src/doc"))) {
+            let file = t!(file);
+            let path = file.path();
+            let filename = path.file_name().unwrap().to_str().unwrap();
+            if !filename.ends_with(".md") || filename == "README.md" {
+                continue
+            }
+
+            let html = out.join(filename).with_extension("html");
+            let rustdoc = build.rustdoc(&compiler);
+            if up_to_date(&path, &html) &&
+               up_to_date(&footer, &html) &&
+               up_to_date(&favicon, &html) &&
+               up_to_date(&full_toc, &html) &&
+               up_to_date(&version_info, &html) &&
+               up_to_date(&rustdoc, &html) {
+                continue
+            }
+
+            let mut cmd = Command::new(&rustdoc);
+            build.add_rustc_lib_path(&compiler, &mut cmd);
+            cmd.arg("--html-after-content").arg(&footer)
+               .arg("--html-before-content").arg(&version_info)
+               .arg("--html-in-header").arg(&favicon)
+               .arg("--markdown-playground-url")
+               .arg("https://play.rust-lang.org/")
+               .arg("-o").arg(&out)
+               .arg(&path);
+
+            if filename == "not_found.md" {
+                cmd.arg("--markdown-no-toc")
+                   .arg("--markdown-css")
+                   .arg("https://doc.rust-lang.org/rust.css");
+            } else {
+                cmd.arg("--markdown-css").arg("rust.css");
+            }
+            build.run(&mut cmd);
         }
         }
-
-        let mut cmd = Command::new(&rustdoc);
-        build.add_rustc_lib_path(&compiler, &mut cmd);
-        cmd.arg("--html-after-content").arg(&footer)
-           .arg("--html-before-content").arg(&version_info)
-           .arg("--html-in-header").arg(&favicon)
-           .arg("--markdown-playground-url")
-           .arg("https://play.rust-lang.org/")
-           .arg("-o").arg(&out)
-           .arg(&path);
-
-        if filename == "not_found.md" {
-            cmd.arg("--markdown-no-toc")
-               .arg("--markdown-css")
-               .arg("https://doc.rust-lang.org/rust.css");
-        } else {
-            cmd.arg("--markdown-css").arg("rust.css");
-        }
-        build.run(&mut cmd);
     }
 }
 
@@ -266,63 +320,77 @@ pub fn standalone(build: &Build, target: &str) {
 //          .default(default && build.config.docs)
 //          .run(move |s| doc::std(build, s.stage, s.target));
 // }
-/// Compile all standard library documentation.
-///
-/// This will generate all documentation for the standard library and its
-/// dependencies. This is largely just a wrapper around `cargo doc`.
-pub fn std(build: &Build, stage: u32, target: &str) {
-    println!("Documenting stage{} std ({})", stage, target);
-    let out = build.doc_out(target);
-    t!(fs::create_dir_all(&out));
-    let compiler = Compiler::new(stage, &build.build);
-    let compiler = if build.force_use_stage1(&compiler, target) {
-        Compiler::new(1, compiler.host)
-    } else {
-        compiler
-    };
-    let out_dir = build.stage_out(&compiler, Mode::Libstd)
-                       .join(target).join("doc");
-    let rustdoc = build.rustdoc(&compiler);
 
 
-    // Here what we're doing is creating a *symlink* (directory junction on
-    // Windows) to the final output location. This is not done as an
-    // optimization but rather for correctness. We've got three trees of
-    // documentation, one for std, one for test, and one for rustc. It's then
-    // our job to merge them all together.
-    //
-    // Unfortunately rustbuild doesn't know nearly as well how to merge doc
-    // trees as rustdoc does itself, so instead of actually having three
-    // separate trees we just have rustdoc output to the same location across
-    // all of them.
-    //
-    // This way rustdoc generates output directly into the output, and rustdoc
-    // will also directly handle merging.
-    let my_out = build.crate_doc_out(target);
-    build.clear_if_dirty(&my_out, &rustdoc);
-    t!(symlink_dir_force(&my_out, &out_dir));
-
-    let mut cargo = build.cargo(&compiler, Mode::Libstd, target, "doc");
-    cargo.arg("--manifest-path")
-         .arg(build.src.join("src/libstd/Cargo.toml"))
-         .arg("--features").arg(build.std_features());
-
-    // We don't want to build docs for internal std dependencies unless
-    // in compiler-docs mode. When not in that mode, we whitelist the crates
-    // for which docs must be built.
-    if !build.config.compiler_docs {
-        cargo.arg("--no-deps");
-        for krate in &["alloc", "collections", "core", "std", "std_unicode"] {
-            cargo.arg("-p").arg(krate);
-            // Create all crate output directories first to make sure rustdoc uses
-            // relative links.
-            // FIXME: Cargo should probably do this itself.
-            t!(fs::create_dir_all(out_dir.join(krate)));
+#[derive(Serialize)]
+pub struct Std<'a> {
+    stage: u32,
+    target: &'a str,
+}
+
+impl<'a> Step<'a> for Std<'a> {
+    type Output = ();
+
+    /// Compile all standard library documentation.
+    ///
+    /// This will generate all documentation for the standard library and its
+    /// dependencies. This is largely just a wrapper around `cargo doc`.
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let stage = self.stage;
+        let target = self.target;
+        println!("Documenting stage{} std ({})", stage, target);
+        let out = build.doc_out(target);
+        t!(fs::create_dir_all(&out));
+        let compiler = Compiler::new(stage, &build.build);
+        let compiler = if build.force_use_stage1(&compiler, target) {
+            Compiler::new(1, compiler.host)
+        } else {
+            compiler
+        };
+        let out_dir = build.stage_out(&compiler, Mode::Libstd)
+                           .join(target).join("doc");
+        let rustdoc = build.rustdoc(&compiler);
+
+        // Here what we're doing is creating a *symlink* (directory junction on
+        // Windows) to the final output location. This is not done as an
+        // optimization but rather for correctness. We've got three trees of
+        // documentation, one for std, one for test, and one for rustc. It's then
+        // our job to merge them all together.
+        //
+        // Unfortunately rustbuild doesn't know nearly as well how to merge doc
+        // trees as rustdoc does itself, so instead of actually having three
+        // separate trees we just have rustdoc output to the same location across
+        // all of them.
+        //
+        // This way rustdoc generates output directly into the output, and rustdoc
+        // will also directly handle merging.
+        let my_out = build.crate_doc_out(target);
+        build.clear_if_dirty(&my_out, &rustdoc);
+        t!(symlink_dir_force(&my_out, &out_dir));
+
+        let mut cargo = build.cargo(&compiler, Mode::Libstd, target, "doc");
+        cargo.arg("--manifest-path")
+             .arg(build.src.join("src/libstd/Cargo.toml"))
+             .arg("--features").arg(build.std_features());
+
+        // We don't want to build docs for internal std dependencies unless
+        // in compiler-docs mode. When not in that mode, we whitelist the crates
+        // for which docs must be built.
+        if !build.config.compiler_docs {
+            cargo.arg("--no-deps");
+            for krate in &["alloc", "collections", "core", "std", "std_unicode"] {
+                cargo.arg("-p").arg(krate);
+                // Create all crate output directories first to make sure rustdoc uses
+                // relative links.
+                // FIXME: Cargo should probably do this itself.
+                t!(fs::create_dir_all(out_dir.join(krate)));
+            }
         }
         }
-    }
 
 
 
 
-    build.run(&mut cargo);
-    cp_r(&my_out, &out);
+        build.run(&mut cargo);
+        cp_r(&my_out, &out);
+    }
 }
 
 // for (krate, path, default) in krates("test") {
@@ -333,36 +401,49 @@ pub fn std(build: &Build, stage: u32, target: &str) {
 //          .default(default && build.config.compiler_docs)
 //          .run(move |s| doc::test(build, s.stage, s.target));
 // }
-/// Compile all libtest documentation.
-///
-/// This will generate all documentation for libtest and its dependencies. This
-/// is largely just a wrapper around `cargo doc`.
-pub fn test(build: &Build, stage: u32, target: &str) {
-    println!("Documenting stage{} test ({})", stage, target);
-    let out = build.doc_out(target);
-    t!(fs::create_dir_all(&out));
-    let compiler = Compiler::new(stage, &build.build);
-    let compiler = if build.force_use_stage1(&compiler, target) {
-        Compiler::new(1, compiler.host)
-    } else {
-        compiler
-    };
-    let out_dir = build.stage_out(&compiler, Mode::Libtest)
-                       .join(target).join("doc");
-    let rustdoc = build.rustdoc(&compiler);
 
 
-    // See docs in std above for why we symlink
-    let my_out = build.crate_doc_out(target);
-    build.clear_if_dirty(&my_out, &rustdoc);
-    t!(symlink_dir_force(&my_out, &out_dir));
-
-    let mut cargo = build.cargo(&compiler, Mode::Libtest, target, "doc");
-    cargo.arg("--manifest-path")
-         .arg(build.src.join("src/libtest/Cargo.toml"));
-    build.run(&mut cargo);
-    cp_r(&my_out, &out);
+#[derive(Serialize)]
+pub struct Test<'a> {
+    stage: u32,
+    target: &'a str,
 }
 
 }
 
+impl<'a> Step<'a> for Test<'a> {
+    type Output = ();
+
+    /// Compile all libtest documentation.
+    ///
+    /// This will generate all documentation for libtest and its dependencies. This
+    /// is largely just a wrapper around `cargo doc`.
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let stage = self.stage;
+        let target = self.target;
+        println!("Documenting stage{} test ({})", stage, target);
+        let out = build.doc_out(target);
+        t!(fs::create_dir_all(&out));
+        let compiler = Compiler::new(stage, &build.build);
+        let compiler = if build.force_use_stage1(&compiler, target) {
+            Compiler::new(1, compiler.host)
+        } else {
+            compiler
+        };
+        let out_dir = build.stage_out(&compiler, Mode::Libtest)
+                           .join(target).join("doc");
+        let rustdoc = build.rustdoc(&compiler);
+
+        // See docs in std above for why we symlink
+        let my_out = build.crate_doc_out(target);
+        build.clear_if_dirty(&my_out, &rustdoc);
+        t!(symlink_dir_force(&my_out, &out_dir));
+
+        let mut cargo = build.cargo(&compiler, Mode::Libtest, target, "doc");
+        cargo.arg("--manifest-path")
+             .arg(build.src.join("src/libtest/Cargo.toml"));
+        build.run(&mut cargo);
+        cp_r(&my_out, &out);
+    }
+}
 
 // for (krate, path, default) in krates("rustc-main") {
 //     rules.doc(&krate.doc_step, path)
@@ -374,51 +455,65 @@ pub fn test(build: &Build, stage: u32, target: &str) {
 //          .run(move |s| doc::rustc(build, s.stage, s.target));
 // }
 //
-/// Generate all compiler documentation.
-///
-/// This will generate all documentation for the compiler libraries and their
-/// dependencies. This is largely just a wrapper around `cargo doc`.
-pub fn rustc(build: &Build, stage: u32, target: &str) {
-    println!("Documenting stage{} compiler ({})", stage, target);
-    let out = build.doc_out(target);
-    t!(fs::create_dir_all(&out));
-    let compiler = Compiler::new(stage, &build.build);
-    let compiler = if build.force_use_stage1(&compiler, target) {
-        Compiler::new(1, compiler.host)
-    } else {
-        compiler
-    };
-    let out_dir = build.stage_out(&compiler, Mode::Librustc)
-                       .join(target).join("doc");
-    let rustdoc = build.rustdoc(&compiler);
 
 
-    // See docs in std above for why we symlink
-    let my_out = build.crate_doc_out(target);
-    build.clear_if_dirty(&my_out, &rustdoc);
-    t!(symlink_dir_force(&my_out, &out_dir));
-
-    let mut cargo = build.cargo(&compiler, Mode::Librustc, target, "doc");
-    cargo.arg("--manifest-path")
-         .arg(build.src.join("src/rustc/Cargo.toml"))
-         .arg("--features").arg(build.rustc_features());
-
-    if build.config.compiler_docs {
-        // src/rustc/Cargo.toml contains bin crates called rustc and rustdoc
-        // which would otherwise overwrite the docs for the real rustc and
-        // rustdoc lib crates.
-        cargo.arg("-p").arg("rustc_driver")
-             .arg("-p").arg("rustdoc");
-    } else {
-        // Like with libstd above if compiler docs aren't enabled then we're not
-        // documenting internal dependencies, so we have a whitelist.
-        cargo.arg("--no-deps");
-        for krate in &["proc_macro"] {
-            cargo.arg("-p").arg(krate);
+#[derive(Serialize)]
+pub struct Rustc<'a> {
+    stage: u32,
+    target: &'a str,
+}
+
+impl<'a> Step<'a> for Rustc<'a> {
+    type Output = ();
+
+    /// Generate all compiler documentation.
+    ///
+    /// This will generate all documentation for the compiler libraries and their
+    /// dependencies. This is largely just a wrapper around `cargo doc`.
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let stage = self.stage;
+        let target = self.target;
+        println!("Documenting stage{} compiler ({})", stage, target);
+        let out = build.doc_out(target);
+        t!(fs::create_dir_all(&out));
+        let compiler = Compiler::new(stage, &build.build);
+        let compiler = if build.force_use_stage1(&compiler, target) {
+            Compiler::new(1, compiler.host)
+        } else {
+            compiler
+        };
+        let out_dir = build.stage_out(&compiler, Mode::Librustc)
+                           .join(target).join("doc");
+        let rustdoc = build.rustdoc(&compiler);
+
+        // See docs in std above for why we symlink
+        let my_out = build.crate_doc_out(target);
+        build.clear_if_dirty(&my_out, &rustdoc);
+        t!(symlink_dir_force(&my_out, &out_dir));
+
+        let mut cargo = build.cargo(&compiler, Mode::Librustc, target, "doc");
+        cargo.arg("--manifest-path")
+             .arg(build.src.join("src/rustc/Cargo.toml"))
+             .arg("--features").arg(build.rustc_features());
+
+        if build.config.compiler_docs {
+            // src/rustc/Cargo.toml contains bin crates called rustc and rustdoc
+            // which would otherwise overwrite the docs for the real rustc and
+            // rustdoc lib crates.
+            cargo.arg("-p").arg("rustc_driver")
+                 .arg("-p").arg("rustdoc");
+        } else {
+            // Like with libstd above if compiler docs aren't enabled then we're not
+            // documenting internal dependencies, so we have a whitelist.
+            cargo.arg("--no-deps");
+            for krate in &["proc_macro"] {
+                cargo.arg("-p").arg(krate);
+            }
         }
         }
-    }
 
 
-    build.run(&mut cargo);
-    cp_r(&my_out, &out);
+        build.run(&mut cargo);
+        cp_r(&my_out, &out);
+    }
 }
 
 // rules.doc("doc-error-index", "src/tools/error_index_generator")
@@ -428,21 +523,32 @@ pub fn rustc(build: &Build, stage: u32, target: &str) {
 //      .host(true)
 //      .run(move |s| doc::error_index(build, s.target));
 
-/// Generates the HTML rendered error-index by running the
-/// `error_index_generator` tool.
-pub fn error_index(build: &Build, target: &str) {
-    println!("Documenting error index ({})", target);
-    let out = build.doc_out(target);
-    t!(fs::create_dir_all(&out));
-    let compiler = Compiler::new(0, &build.build);
-    let mut index = build.tool_cmd(&compiler, "error_index_generator");
-    index.arg("html");
-    index.arg(out.join("error-index.html"));
-
-    // FIXME: shouldn't have to pass this env var
-    index.env("CFG_BUILD", &build.build);
+#[derive(Serialize)]
+pub struct ErrorIndex<'a> {
+    target: &'a str,
+}
 
 
-    build.run(&mut index);
+impl<'a> Step<'a> for ErrorIndex<'a> {
+    type Output = ();
+
+    /// Generates the HTML rendered error-index by running the
+    /// `error_index_generator` tool.
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let target = self.target;
+        println!("Documenting error index ({})", target);
+        let out = build.doc_out(target);
+        t!(fs::create_dir_all(&out));
+        let compiler = Compiler::new(0, &build.build);
+        let mut index = build.tool_cmd(&compiler, "error_index_generator");
+        index.arg("html");
+        index.arg(out.join("error-index.html"));
+
+        // FIXME: shouldn't have to pass this env var
+        index.env("CFG_BUILD", &build.build);
+
+        build.run(&mut index);
+    }
 }
 
 // rules.doc("doc-unstable-book-gen", "src/tools/unstable-book-gen")
@@ -457,17 +563,28 @@ pub fn error_index(build: &Build, target: &str) {
 //      .host(true)
 //      .run(move |s| doc::unstable_book_gen(build, s.target));
 
-pub fn unstable_book_gen(build: &Build, target: &str) {
-    println!("Generating unstable book md files ({})", target);
-    let out = build.md_doc_out(target).join("unstable-book");
-    t!(fs::create_dir_all(&out));
-    t!(fs::remove_dir_all(&out));
-    let compiler = Compiler::new(0, &build.build);
-    let mut cmd = build.tool_cmd(&compiler, "unstable-book-gen");
-    cmd.arg(build.src.join("src"));
-    cmd.arg(out);
+#[derive(Serialize)]
+pub struct UnstableBookGen<'a> {
+    target: &'a str,
+}
 
 
-    build.run(&mut cmd);
+impl<'a> Step<'a> for UnstableBookGen<'a> {
+    type Output = ();
+
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let target = self.target;
+        println!("Generating unstable book md files ({})", target);
+        let out = build.md_doc_out(target).join("unstable-book");
+        t!(fs::create_dir_all(&out));
+        t!(fs::remove_dir_all(&out));
+        let compiler = Compiler::new(0, &build.build);
+        let mut cmd = build.tool_cmd(&compiler, "unstable-book-gen");
+        cmd.arg(build.src.join("src"));
+        cmd.arg(out);
+
+        build.run(&mut cmd);
+    }
 }
 
 fn symlink_dir_force(src: &Path, dst: &Path) -> io::Result<()> {
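
The doc steps above all gate their work on `build_helper::up_to_date` before shelling out to rustbook or rustdoc. As a rough mental model only, and not the actual build_helper implementation (which also handles directory trees), such a freshness check can be sketched as a modification-time comparison:

    use std::fs;
    use std::path::Path;
    use std::time::SystemTime;

    // Sketch of a freshness check: the destination counts as up to date when
    // it exists and is at least as new as the source. A missing or unreadable
    // path is treated as stale so the caller rebuilds.
    fn up_to_date(src: &Path, dst: &Path) -> bool {
        let mtime = |p: &Path| -> Option<SystemTime> {
            fs::metadata(p).and_then(|m| m.modified()).ok()
        };
        match (mtime(src), mtime(dst)) {
            (Some(src_time), Some(dst_time)) => dst_time >= src_time,
            _ => false,
        }
    }

    fn main() {
        let src = Path::new("src/doc/book");
        let index = Path::new("build/doc/book/index.html");
        if up_to_date(src, index) {
            println!("book docs are up to date, skipping rustbook");
        } else {
            println!("book docs are stale, rebuilding");
        }
    }
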
index d28060559aca5c4bb65b21c3bddb375af7669c80..6ae5c03bfeb4c8b86a039f99d4c641fcee659ee2 100644 (file)
@@ -33,7 +33,7 @@
 use util;
 use build_helper::up_to_date;
 
-/ rules.build("llvm", "src/llvm")
+// rules.build("llvm", "src/llvm")
 //      .host(true)
 //      .dep(move |s| {
 //          if s.target == build.build {
 //          }
 //      })
 //      .run(move |s| native::llvm(build, s.target));
-/// Compile LLVM for `target`.
-pub fn llvm(build: &Build, target: &str) {
-    // If we're using a custom LLVM bail out here, but we can only use a
-    // custom LLVM for the build triple.
-    if let Some(config) = build.config.target_config.get(target) {
-        if let Some(ref s) = config.llvm_config {
-            return check_llvm_version(build, s);
+
+#[derive(Serialize)]
+pub struct Llvm<'a> {
+    pub target: &'a str,
+}
+
+impl<'a> Step<'a> for Llvm<'a> {
+    type Output = ();
+
+    /// Compile LLVM for `target`.
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let target = self.target;
+        // If we're using a custom LLVM bail out here, but we can only use a
+        // custom LLVM for the build triple.
+        if let Some(config) = build.config.target_config.get(target) {
+            if let Some(ref s) = config.llvm_config {
+                return check_llvm_version(build, s);
+            }
         }
         }
-    }
 
 
-    let rebuild_trigger = build.src.join("src/rustllvm/llvm-rebuild-trigger");
-    let mut rebuild_trigger_contents = String::new();
-    t!(t!(File::open(&rebuild_trigger)).read_to_string(&mut rebuild_trigger_contents));
+        let rebuild_trigger = build.src.join("src/rustllvm/llvm-rebuild-trigger");
+        let mut rebuild_trigger_contents = String::new();
+        t!(t!(File::open(&rebuild_trigger)).read_to_string(&mut rebuild_trigger_contents));
 
 
-    let out_dir = build.llvm_out(target);
-    let done_stamp = out_dir.join("llvm-finished-building");
-    if done_stamp.exists() {
-        let mut done_contents = String::new();
-        t!(t!(File::open(&done_stamp)).read_to_string(&mut done_contents));
+        let out_dir = build.llvm_out(target);
+        let done_stamp = out_dir.join("llvm-finished-building");
+        if done_stamp.exists() {
+            let mut done_contents = String::new();
+            t!(t!(File::open(&done_stamp)).read_to_string(&mut done_contents));
 
 
-        // If LLVM was already built previously and contents of the rebuild-trigger file
-        // didn't change from the previous build, then no action is required.
-        if done_contents == rebuild_trigger_contents {
-            return
+            // If LLVM was already built previously and contents of the rebuild-trigger file
+            // didn't change from the previous build, then no action is required.
+            if done_contents == rebuild_trigger_contents {
+                return
+            }
+        }
+        if build.config.llvm_clean_rebuild {
+            drop(fs::remove_dir_all(&out_dir));
         }
         }
-    }
-    if build.config.llvm_clean_rebuild {
-        drop(fs::remove_dir_all(&out_dir));
-    }
 
 
-    let _folder = build.fold_output(|| "llvm");
-    println!("Building LLVM for {}", target);
-    let _time = util::timeit();
-    t!(fs::create_dir_all(&out_dir));
+        let _folder = build.fold_output(|| "llvm");
+        println!("Building LLVM for {}", target);
+        let _time = util::timeit();
+        t!(fs::create_dir_all(&out_dir));
 
 
-    // http://llvm.org/docs/CMake.html
-    let mut cfg = cmake::Config::new(build.src.join("src/llvm"));
-    if build.config.ninja {
-        cfg.generator("Ninja");
-    }
+        // http://llvm.org/docs/CMake.html
+        let mut cfg = cmake::Config::new(build.src.join("src/llvm"));
+        if build.config.ninja {
+            cfg.generator("Ninja");
+        }
 
 
-    let profile = match (build.config.llvm_optimize, build.config.llvm_release_debuginfo) {
-        (false, _) => "Debug",
-        (true, false) => "Release",
-        (true, true) => "RelWithDebInfo",
-    };
-
-    // NOTE: remember to also update `config.toml.example` when changing the defaults!
-    let llvm_targets = match build.config.llvm_targets {
-        Some(ref s) => s,
-        None => "X86;ARM;AArch64;Mips;PowerPC;SystemZ;JSBackend;MSP430;Sparc;NVPTX;Hexagon",
-    };
-
-    let llvm_exp_targets = match build.config.llvm_experimental_targets {
-        Some(ref s) => s,
-        None => "",
-    };
-
-    let assertions = if build.config.llvm_assertions {"ON"} else {"OFF"};
-
-    cfg.target(target)
-       .host(&build.build)
-       .out_dir(&out_dir)
-       .profile(profile)
-       .define("LLVM_ENABLE_ASSERTIONS", assertions)
-       .define("LLVM_TARGETS_TO_BUILD", llvm_targets)
-       .define("LLVM_EXPERIMENTAL_TARGETS_TO_BUILD", llvm_exp_targets)
-       .define("LLVM_INCLUDE_EXAMPLES", "OFF")
-       .define("LLVM_INCLUDE_TESTS", "OFF")
-       .define("LLVM_INCLUDE_DOCS", "OFF")
-       .define("LLVM_ENABLE_ZLIB", "OFF")
-       .define("WITH_POLLY", "OFF")
-       .define("LLVM_ENABLE_TERMINFO", "OFF")
-       .define("LLVM_ENABLE_LIBEDIT", "OFF")
-       .define("LLVM_PARALLEL_COMPILE_JOBS", build.jobs().to_string())
-       .define("LLVM_TARGET_ARCH", target.split('-').next().unwrap())
-       .define("LLVM_DEFAULT_TARGET_TRIPLE", target);
-
-    if target.contains("msvc") {
-        cfg.define("LLVM_USE_CRT_DEBUG", "MT");
-        cfg.define("LLVM_USE_CRT_RELEASE", "MT");
-        cfg.define("LLVM_USE_CRT_RELWITHDEBINFO", "MT");
-        cfg.static_crt(true);
-    }
+        let profile = match (build.config.llvm_optimize, build.config.llvm_release_debuginfo) {
+            (false, _) => "Debug",
+            (true, false) => "Release",
+            (true, true) => "RelWithDebInfo",
+        };
 
 
-    if target.starts_with("i686") {
-        cfg.define("LLVM_BUILD_32_BITS", "ON");
-    }
+        // NOTE: remember to also update `config.toml.example` when changing the defaults!
+        let llvm_targets = match build.config.llvm_targets {
+            Some(ref s) => s,
+            None => "X86;ARM;AArch64;Mips;PowerPC;SystemZ;JSBackend;MSP430;Sparc;NVPTX;Hexagon",
+        };
 
 
-    if let Some(num_linkers) = build.config.llvm_link_jobs {
-        if num_linkers > 0 {
-            cfg.define("LLVM_PARALLEL_LINK_JOBS", num_linkers.to_string());
-        }
-    }
+        let llvm_exp_targets = match build.config.llvm_experimental_targets {
+            Some(ref s) => s,
+            None => "",
+        };
 
 
-    // http://llvm.org/docs/HowToCrossCompileLLVM.html
-    if target != build.build {
-        // FIXME: if the llvm root for the build triple is overridden then we
-        //        should use llvm-tblgen from there, also should verify that it
-        //        actually exists most of the time in normal installs of LLVM.
-        let host = build.llvm_out(&build.build).join("bin/llvm-tblgen");
-        cfg.define("CMAKE_CROSSCOMPILING", "True")
-           .define("LLVM_TABLEGEN", &host);
-    }
+        let assertions = if build.config.llvm_assertions {"ON"} else {"OFF"};
+
+        cfg.target(target)
+           .host(&build.build)
+           .out_dir(&out_dir)
+           .profile(profile)
+           .define("LLVM_ENABLE_ASSERTIONS", assertions)
+           .define("LLVM_TARGETS_TO_BUILD", llvm_targets)
+           .define("LLVM_EXPERIMENTAL_TARGETS_TO_BUILD", llvm_exp_targets)
+           .define("LLVM_INCLUDE_EXAMPLES", "OFF")
+           .define("LLVM_INCLUDE_TESTS", "OFF")
+           .define("LLVM_INCLUDE_DOCS", "OFF")
+           .define("LLVM_ENABLE_ZLIB", "OFF")
+           .define("WITH_POLLY", "OFF")
+           .define("LLVM_ENABLE_TERMINFO", "OFF")
+           .define("LLVM_ENABLE_LIBEDIT", "OFF")
+           .define("LLVM_PARALLEL_COMPILE_JOBS", build.jobs().to_string())
+           .define("LLVM_TARGET_ARCH", target.split('-').next().unwrap())
+           .define("LLVM_DEFAULT_TARGET_TRIPLE", target);
 
 
-    let sanitize_cc = |cc: &Path| {
         if target.contains("msvc") {
         if target.contains("msvc") {
-            OsString::from(cc.to_str().unwrap().replace("\\", "/"))
-        } else {
-            cc.as_os_str().to_owned()
+            cfg.define("LLVM_USE_CRT_DEBUG", "MT");
+            cfg.define("LLVM_USE_CRT_RELEASE", "MT");
+            cfg.define("LLVM_USE_CRT_RELWITHDEBINFO", "MT");
+            cfg.static_crt(true);
         }
         }
-    };
 
 
-    let configure_compilers = |cfg: &mut cmake::Config| {
-        // MSVC with CMake uses msbuild by default which doesn't respect these
-        // vars that we'd otherwise configure. In that case we just skip this
-        // entirely.
-        if target.contains("msvc") && !build.config.ninja {
-            return
+        if target.starts_with("i686") {
+            cfg.define("LLVM_BUILD_32_BITS", "ON");
         }
 
         }
 
-        let cc = build.cc(target);
-        let cxx = build.cxx(target).unwrap();
-
-        // Handle msvc + ninja + ccache specially (this is what the bots use)
-        if target.contains("msvc") &&
-           build.config.ninja &&
-           build.config.ccache.is_some() {
-            let mut cc = env::current_exe().expect("failed to get cwd");
-            cc.set_file_name("sccache-plus-cl.exe");
-
-           cfg.define("CMAKE_C_COMPILER", sanitize_cc(&cc))
-              .define("CMAKE_CXX_COMPILER", sanitize_cc(&cc));
-           cfg.env("SCCACHE_PATH",
-                   build.config.ccache.as_ref().unwrap())
-              .env("SCCACHE_TARGET", target);
-
-        // If ccache is configured we inform the build a little differently how
-        // to invoke ccache while also invoking our compilers.
-        } else if let Some(ref ccache) = build.config.ccache {
-           cfg.define("CMAKE_C_COMPILER", ccache)
-              .define("CMAKE_C_COMPILER_ARG1", sanitize_cc(cc))
-              .define("CMAKE_CXX_COMPILER", ccache)
-              .define("CMAKE_CXX_COMPILER_ARG1", sanitize_cc(cxx));
-        } else {
-           cfg.define("CMAKE_C_COMPILER", sanitize_cc(cc))
-              .define("CMAKE_CXX_COMPILER", sanitize_cc(cxx));
+        if let Some(num_linkers) = build.config.llvm_link_jobs {
+            if num_linkers > 0 {
+                cfg.define("LLVM_PARALLEL_LINK_JOBS", num_linkers.to_string());
+            }
         }
 
         }
 
-        cfg.build_arg("-j").build_arg(build.jobs().to_string());
-        cfg.define("CMAKE_C_FLAGS", build.cflags(target).join(" "));
-        cfg.define("CMAKE_CXX_FLAGS", build.cflags(target).join(" "));
-    };
+        // http://llvm.org/docs/HowToCrossCompileLLVM.html
+        if target != build.build {
+            // FIXME: if the llvm root for the build triple is overridden then we
+            //        should use llvm-tblgen from there, also should verify that it
+            //        actually exists most of the time in normal installs of LLVM.
+            let host = build.llvm_out(&build.build).join("bin/llvm-tblgen");
+            cfg.define("CMAKE_CROSSCOMPILING", "True")
+               .define("LLVM_TABLEGEN", &host);
+        }
 
 
-    configure_compilers(&mut cfg);
+        let sanitize_cc = |cc: &Path| {
+            if target.contains("msvc") {
+                OsString::from(cc.to_str().unwrap().replace("\\", "/"))
+            } else {
+                cc.as_os_str().to_owned()
+            }
+        };
 
 
-    if env::var_os("SCCACHE_ERROR_LOG").is_some() {
-        cfg.env("RUST_LOG", "sccache=warn");
-    }
+        let configure_compilers = |cfg: &mut cmake::Config| {
+            // MSVC with CMake uses msbuild by default which doesn't respect these
+            // vars that we'd otherwise configure. In that case we just skip this
+            // entirely.
+            if target.contains("msvc") && !build.config.ninja {
+                return
+            }
+
+            let cc = build.cc(target);
+            let cxx = build.cxx(target).unwrap();
+
+            // Handle msvc + ninja + ccache specially (this is what the bots use)
+            if target.contains("msvc") &&
+               build.config.ninja &&
+               build.config.ccache.is_some() {
+                let mut cc = env::current_exe().expect("failed to get cwd");
+                cc.set_file_name("sccache-plus-cl.exe");
+
+               cfg.define("CMAKE_C_COMPILER", sanitize_cc(&cc))
+                  .define("CMAKE_CXX_COMPILER", sanitize_cc(&cc));
+               cfg.env("SCCACHE_PATH",
+                       build.config.ccache.as_ref().unwrap())
+                  .env("SCCACHE_TARGET", target);
+
+            // If ccache is configured we inform the build a little differently how
+            // to invoke ccache while also invoking our compilers.
+            } else if let Some(ref ccache) = build.config.ccache {
+               cfg.define("CMAKE_C_COMPILER", ccache)
+                  .define("CMAKE_C_COMPILER_ARG1", sanitize_cc(cc))
+                  .define("CMAKE_CXX_COMPILER", ccache)
+                  .define("CMAKE_CXX_COMPILER_ARG1", sanitize_cc(cxx));
+            } else {
+               cfg.define("CMAKE_C_COMPILER", sanitize_cc(cc))
+                  .define("CMAKE_CXX_COMPILER", sanitize_cc(cxx));
+            }
+
+            cfg.build_arg("-j").build_arg(build.jobs().to_string());
+            cfg.define("CMAKE_C_FLAGS", build.cflags(target).join(" "));
+            cfg.define("CMAKE_CXX_FLAGS", build.cflags(target).join(" "));
+        };
 
 
-    // FIXME: we don't actually need to build all LLVM tools and all LLVM
-    //        libraries here, e.g. we just want a few components and a few
-    //        tools. Figure out how to filter them down and only build the right
-    //        tools and libs on all platforms.
-    cfg.build();
+        configure_compilers(&mut cfg);
 
 
-    t!(t!(File::create(&done_stamp)).write_all(rebuild_trigger_contents.as_bytes()));
+        if env::var_os("SCCACHE_ERROR_LOG").is_some() {
+            cfg.env("RUST_LOG", "sccache=warn");
+        }
+
+        // FIXME: we don't actually need to build all LLVM tools and all LLVM
+        //        libraries here, e.g. we just want a few components and a few
+        //        tools. Figure out how to filter them down and only build the right
+        //        tools and libs on all platforms.
+        cfg.build();
+
+        t!(t!(File::create(&done_stamp)).write_all(rebuild_trigger_contents.as_bytes()));
+    }
 }
 
 fn check_llvm_version(build: &Build, llvm_config: &Path) {
@@ -228,39 +240,52 @@ fn check_llvm_version(build: &Build, llvm_config: &Path) {
 
 //rules.build("test-helpers", "src/rt/rust_test_helpers.c")
 //     .run(move |s| native::test_helpers(build, s.target));
 
 //rules.build("test-helpers", "src/rt/rust_test_helpers.c")
 //     .run(move |s| native::test_helpers(build, s.target));
-/// Compiles the `rust_test_helpers.c` library which we used in various
-/// `run-pass` test suites for ABI testing.
-pub fn test_helpers(build: &Build, target: &str) {
-    let dst = build.test_helpers_out(target);
-    let src = build.src.join("src/rt/rust_test_helpers.c");
-    if up_to_date(&src, &dst.join("librust_test_helpers.a")) {
-        return
-    }
 
 
-    let _folder = build.fold_output(|| "build_test_helpers");
-    println!("Building test helpers");
-    t!(fs::create_dir_all(&dst));
-    let mut cfg = gcc::Config::new();
-
-    // We may have found various cross-compilers a little differently due to our
-    // extra configuration, so inform gcc of these compilers. Note, though, that
-    // on MSVC we still need gcc's detection of env vars (ugh).
-    if !target.contains("msvc") {
-        if let Some(ar) = build.ar(target) {
-            cfg.archiver(ar);
+#[derive(Serialize)]
+pub struct TestHelpers<'a> {
+    target: &'a str,
+}
+
+impl<'a> Step<'a> for TestHelpers<'a> {
+    type Output = ();
+
+    /// Compiles the `rust_test_helpers.c` library which we used in various
+    /// `run-pass` test suites for ABI testing.
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let target = self.target;
+        let dst = build.test_helpers_out(target);
+        let src = build.src.join("src/rt/rust_test_helpers.c");
+        if up_to_date(&src, &dst.join("librust_test_helpers.a")) {
+            return
+        }
+
+        let _folder = build.fold_output(|| "build_test_helpers");
+        println!("Building test helpers");
+        t!(fs::create_dir_all(&dst));
+        let mut cfg = gcc::Config::new();
+
+        // We may have found various cross-compilers a little differently due to our
+        // extra configuration, so inform gcc of these compilers. Note, though, that
+        // on MSVC we still need gcc's detection of env vars (ugh).
+        if !target.contains("msvc") {
+            if let Some(ar) = build.ar(target) {
+                cfg.archiver(ar);
+            }
+            cfg.compiler(build.cc(target));
         }
         }
-        cfg.compiler(build.cc(target));
-    }
 
 
-    cfg.cargo_metadata(false)
-       .out_dir(&dst)
-       .target(target)
-       .host(&build.build)
-       .opt_level(0)
-       .debug(false)
-       .file(build.src.join("src/rt/rust_test_helpers.c"))
-       .compile("librust_test_helpers.a");
+        cfg.cargo_metadata(false)
+           .out_dir(&dst)
+           .target(target)
+           .host(&build.build)
+           .opt_level(0)
+           .debug(false)
+           .file(build.src.join("src/rt/rust_test_helpers.c"))
+           .compile("librust_test_helpers.a");
+    }
 }
 }
+
 const OPENSSL_VERS: &'static str = "1.0.2k";
 const OPENSSL_SHA256: &'static str =
     "6b3977c61f2aedf0f96367dcfb5c6e578cf37e7b8d913b4ecb6643c3cb88d8c0";
@@ -268,124 +293,135 @@ pub fn test_helpers(build: &Build, target: &str) {
 //rules.build("openssl", "path/to/nowhere")
 //     .run(move |s| native::openssl(build, s.target));
 
 //rules.build("openssl", "path/to/nowhere")
 //     .run(move |s| native::openssl(build, s.target));
 
-pub fn openssl(build: &Build, target: &str) {
-    let out = match build.openssl_dir(target) {
-        Some(dir) => dir,
-        None => return,
-    };
+#[derive(Serialize)]
+pub struct Openssl<'a> {
+    target: &'a str,
+}
 
 
-    let stamp = out.join(".stamp");
-    let mut contents = String::new();
-    drop(File::open(&stamp).and_then(|mut f| f.read_to_string(&mut contents)));
-    if contents == OPENSSL_VERS {
-        return
-    }
-    t!(fs::create_dir_all(&out));
-
-    let name = format!("openssl-{}.tar.gz", OPENSSL_VERS);
-    let tarball = out.join(&name);
-    if !tarball.exists() {
-        let tmp = tarball.with_extension("tmp");
-        // originally from https://www.openssl.org/source/...
-        let url = format!("https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/{}",
-                          name);
-        let mut ok = false;
-        for _ in 0..3 {
-            let status = Command::new("curl")
-                            .arg("-o").arg(&tmp)
-                            .arg(&url)
-                            .status()
-                            .expect("failed to spawn curl");
-            if status.success() {
-                ok = true;
-                break
-            }
+impl<'a> Step<'a> for Openssl<'a> {
+    type Output = ();
+
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let target = self.target;
+        let out = match build.openssl_dir(target) {
+            Some(dir) => dir,
+            None => return,
+        };
+
+        let stamp = out.join(".stamp");
+        let mut contents = String::new();
+        drop(File::open(&stamp).and_then(|mut f| f.read_to_string(&mut contents)));
+        if contents == OPENSSL_VERS {
+            return
         }
         }
-        if !ok {
-            panic!("failed to download openssl source")
+        t!(fs::create_dir_all(&out));
+
+        let name = format!("openssl-{}.tar.gz", OPENSSL_VERS);
+        let tarball = out.join(&name);
+        if !tarball.exists() {
+            let tmp = tarball.with_extension("tmp");
+            // originally from https://www.openssl.org/source/...
+            let url = format!("https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/{}",
+                              name);
+            let mut ok = false;
+            for _ in 0..3 {
+                let status = Command::new("curl")
+                                .arg("-o").arg(&tmp)
+                                .arg(&url)
+                                .status()
+                                .expect("failed to spawn curl");
+                if status.success() {
+                    ok = true;
+                    break
+                }
+            }
+            if !ok {
+                panic!("failed to download openssl source")
+            }
+            let mut shasum = if target.contains("apple") {
+                let mut cmd = Command::new("shasum");
+                cmd.arg("-a").arg("256");
+                cmd
+            } else {
+                Command::new("sha256sum")
+            };
+            let output = output(&mut shasum.arg(&tmp));
+            let found = output.split_whitespace().next().unwrap();
+            if found != OPENSSL_SHA256 {
+                panic!("downloaded openssl sha256 different\n\
+                        expected: {}\n\
+                        found:    {}\n", OPENSSL_SHA256, found);
+            }
+            t!(fs::rename(&tmp, &tarball));
         }
         }
-        let mut shasum = if target.contains("apple") {
-            let mut cmd = Command::new("shasum");
-            cmd.arg("-a").arg("256");
-            cmd
-        } else {
-            Command::new("sha256sum")
+        let obj = out.join(format!("openssl-{}", OPENSSL_VERS));
+        let dst = build.openssl_install_dir(target).unwrap();
+        drop(fs::remove_dir_all(&obj));
+        drop(fs::remove_dir_all(&dst));
+        build.run(Command::new("tar").arg("xf").arg(&tarball).current_dir(&out));
+
+        let mut configure = Command::new(obj.join("Configure"));
+        configure.arg(format!("--prefix={}", dst.display()));
+        configure.arg("no-dso");
+        configure.arg("no-ssl2");
+        configure.arg("no-ssl3");
+
+        let os = match target {
+            "aarch64-linux-android" => "linux-aarch64",
+            "aarch64-unknown-linux-gnu" => "linux-aarch64",
+            "arm-linux-androideabi" => "android",
+            "arm-unknown-linux-gnueabi" => "linux-armv4",
+            "arm-unknown-linux-gnueabihf" => "linux-armv4",
+            "armv7-linux-androideabi" => "android-armv7",
+            "armv7-unknown-linux-gnueabihf" => "linux-armv4",
+            "i686-apple-darwin" => "darwin-i386-cc",
+            "i686-linux-android" => "android-x86",
+            "i686-unknown-freebsd" => "BSD-x86-elf",
+            "i686-unknown-linux-gnu" => "linux-elf",
+            "i686-unknown-linux-musl" => "linux-elf",
+            "mips-unknown-linux-gnu" => "linux-mips32",
+            "mips64-unknown-linux-gnuabi64" => "linux64-mips64",
+            "mips64el-unknown-linux-gnuabi64" => "linux64-mips64",
+            "mipsel-unknown-linux-gnu" => "linux-mips32",
+            "powerpc-unknown-linux-gnu" => "linux-ppc",
+            "powerpc64-unknown-linux-gnu" => "linux-ppc64",
+            "powerpc64le-unknown-linux-gnu" => "linux-ppc64le",
+            "s390x-unknown-linux-gnu" => "linux64-s390x",
+            "x86_64-apple-darwin" => "darwin64-x86_64-cc",
+            "x86_64-linux-android" => "linux-x86_64",
+            "x86_64-unknown-freebsd" => "BSD-x86_64",
+            "x86_64-unknown-linux-gnu" => "linux-x86_64",
+            "x86_64-unknown-linux-musl" => "linux-x86_64",
+            "x86_64-unknown-netbsd" => "BSD-x86_64",
+            _ => panic!("don't know how to configure OpenSSL for {}", target),
         };
-        let output = output(&mut shasum.arg(&tmp));
-        let found = output.split_whitespace().next().unwrap();
-        if found != OPENSSL_SHA256 {
-            panic!("downloaded openssl sha256 different\n\
-                    expected: {}\n\
-                    found:    {}\n", OPENSSL_SHA256, found);
+        configure.arg(os);
+        configure.env("CC", build.cc(target));
+        for flag in build.cflags(target) {
+            configure.arg(flag);
         }
-        t!(fs::rename(&tmp, &tarball));
-    }
-    let obj = out.join(format!("openssl-{}", OPENSSL_VERS));
-    let dst = build.openssl_install_dir(target).unwrap();
-    drop(fs::remove_dir_all(&obj));
-    drop(fs::remove_dir_all(&dst));
-    build.run(Command::new("tar").arg("xf").arg(&tarball).current_dir(&out));
-
-    let mut configure = Command::new(obj.join("Configure"));
-    configure.arg(format!("--prefix={}", dst.display()));
-    configure.arg("no-dso");
-    configure.arg("no-ssl2");
-    configure.arg("no-ssl3");
-
-    let os = match target {
-        "aarch64-linux-android" => "linux-aarch64",
-        "aarch64-unknown-linux-gnu" => "linux-aarch64",
-        "arm-linux-androideabi" => "android",
-        "arm-unknown-linux-gnueabi" => "linux-armv4",
-        "arm-unknown-linux-gnueabihf" => "linux-armv4",
-        "armv7-linux-androideabi" => "android-armv7",
-        "armv7-unknown-linux-gnueabihf" => "linux-armv4",
-        "i686-apple-darwin" => "darwin-i386-cc",
-        "i686-linux-android" => "android-x86",
-        "i686-unknown-freebsd" => "BSD-x86-elf",
-        "i686-unknown-linux-gnu" => "linux-elf",
-        "i686-unknown-linux-musl" => "linux-elf",
-        "mips-unknown-linux-gnu" => "linux-mips32",
-        "mips64-unknown-linux-gnuabi64" => "linux64-mips64",
-        "mips64el-unknown-linux-gnuabi64" => "linux64-mips64",
-        "mipsel-unknown-linux-gnu" => "linux-mips32",
-        "powerpc-unknown-linux-gnu" => "linux-ppc",
-        "powerpc64-unknown-linux-gnu" => "linux-ppc64",
-        "powerpc64le-unknown-linux-gnu" => "linux-ppc64le",
-        "s390x-unknown-linux-gnu" => "linux64-s390x",
-        "x86_64-apple-darwin" => "darwin64-x86_64-cc",
-        "x86_64-linux-android" => "linux-x86_64",
-        "x86_64-unknown-freebsd" => "BSD-x86_64",
-        "x86_64-unknown-linux-gnu" => "linux-x86_64",
-        "x86_64-unknown-linux-musl" => "linux-x86_64",
-        "x86_64-unknown-netbsd" => "BSD-x86_64",
-        _ => panic!("don't know how to configure OpenSSL for {}", target),
-    };
-    configure.arg(os);
-    configure.env("CC", build.cc(target));
-    for flag in build.cflags(target) {
-        configure.arg(flag);
-    }
-    // There is no specific os target for android aarch64 or x86_64,
-    // so we need to pass some extra cflags
-    if target == "aarch64-linux-android" || target == "x86_64-linux-android" {
-        configure.arg("-mandroid");
-        configure.arg("-fomit-frame-pointer");
-    }
-    // Make PIE binaries
-    // Non-PIE linker support was removed in Lollipop
-    // https://source.android.com/security/enhancements/enhancements50
-    if target == "i686-linux-android" {
-        configure.arg("no-asm");
+        // There is no specific os target for android aarch64 or x86_64,
+        // so we need to pass some extra cflags
+        if target == "aarch64-linux-android" || target == "x86_64-linux-android" {
+            configure.arg("-mandroid");
+            configure.arg("-fomit-frame-pointer");
+        }
+        // Make PIE binaries
+        // Non-PIE linker support was removed in Lollipop
+        // https://source.android.com/security/enhancements/enhancements50
+        if target == "i686-linux-android" {
+            configure.arg("no-asm");
+        }
+        configure.current_dir(&obj);
+        println!("Configuring openssl for {}", target);
+        build.run_quiet(&mut configure);
+        println!("Building openssl for {}", target);
+        build.run_quiet(Command::new("make").arg("-j1").current_dir(&obj));
+        println!("Installing openssl for {}", target);
+        build.run_quiet(Command::new("make").arg("install").current_dir(&obj));
+
+        let mut f = t!(File::create(&stamp));
+        t!(f.write_all(OPENSSL_VERS.as_bytes()));
     }
-    configure.current_dir(&obj);
-    println!("Configuring openssl for {}", target);
-    build.run_quiet(&mut configure);
-    println!("Building openssl for {}", target);
-    build.run_quiet(Command::new("make").arg("-j1").current_dir(&obj));
-    println!("Installing openssl for {}", target);
-    build.run_quiet(Command::new("make").arg("install").current_dir(&obj));
-
-    let mut f = t!(File::create(&stamp));
-    t!(f.write_all(OPENSSL_VERS.as_bytes()));
 }
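The pattern in the hunks above repeats for every rule being ported: a free function such as openssl(build, target) becomes a struct holding the old arguments, plus an impl of Step whose run method pulls build back out of the builder. Below is a minimal, self-contained sketch of that shape; the real Step trait and Builder live in src/bootstrap/builder.rs and are only inferred here from their call sites in this diff, so their exact definitions are assumptions.

// Sketch only: `Step`, `Builder`, and `Build` below are stand-ins inferred from
// the call sites in this diff, not the real definitions in builder.rs.
pub struct Build;
pub struct Builder<'a> { pub build: &'a Build }

pub trait Step<'a> {
    type Output;
    fn run(self, builder: &Builder) -> Self::Output;
}

// Before the refactor: a free function carrying its inputs as arguments.
pub fn openssl(_build: &Build, _target: &str) {
    // download the tarball, verify its sha256, run Configure, make, make install
}

// After the refactor: the arguments become struct fields and the body moves
// into `Step::run`, which retrieves `build` from the builder.
pub struct Openssl<'a> { pub target: &'a str }

impl<'a> Step<'a> for Openssl<'a> {
    type Output = ();
    fn run(self, builder: &Builder) {
        let build = builder.build;
        openssl(build, self.target) // same body as before, just relocated
    }
}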
diff --git a/src/bootstrap/tool.rs b/src/bootstrap/tool.rs
new file mode 100644 (file)
index 0000000..52ec273
--- /dev/null
+++ b/src/bootstrap/tool.rs
@@ -0,0 +1,205 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use std::env;
+use std::path::{Path, PathBuf};
+use std::process::Command;
+
+use Mode;
+use builder::{Step, Builder, Compiler};
+use util::{exe, add_lib_path};
+use compile::{self, stamp, Rustc};
+use native;
+use channel::GitInfo;
+
+//// ========================================================================
+//// Build tools
+////
+//// Tools used during the build system but not shipped
+//// "pseudo rule" which represents completely cleaning out the tools dir in
+//// one stage. This needs to happen whenever a dependency changes (e.g.
+//// libstd, libtest, librustc) and all of the tool compilations above will
+//// be sequenced after this rule.
+//rules.build("maybe-clean-tools", "path/to/nowhere")
+//     .after("librustc-tool")
+//     .after("libtest-tool")
+//     .after("libstd-tool");
+//
+//rules.build("librustc-tool", "path/to/nowhere")
+//     .dep(|s| s.name("librustc"))
+//     .run(move |s| compile::maybe_clean_tools(build, s.stage, s.target, Mode::Librustc));
+//rules.build("libtest-tool", "path/to/nowhere")
+//     .dep(|s| s.name("libtest"))
+//     .run(move |s| compile::maybe_clean_tools(build, s.stage, s.target, Mode::Libtest));
+//rules.build("libstd-tool", "path/to/nowhere")
+//     .dep(|s| s.name("libstd"))
+//     .run(move |s| compile::maybe_clean_tools(build, s.stage, s.target, Mode::Libstd));
+//
+
+#[derive(Serialize)]
+pub struct CleanTools<'a> {
+    pub stage: u32,
+    pub target: &'a str,
+    pub mode: Mode,
+}
+
+impl<'a> Step<'a> for CleanTools<'a> {
+    type Output = ();
+
+    /// Clean out the tool build directory if its dependencies have changed.
+    ///
+    /// Compares the library stamp for `mode` (libstd, libtest or librustc) with
+    /// the cargo tool output for `target` and clears that directory when stale.
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let stage = self.stage;
+        let target = self.target;
+        let mode = self.mode;
+
+        let compiler = Compiler::new(stage, &build.build);
+
+        let stamp = match mode {
+            Mode::Libstd => libstd_stamp(build, &compiler, target),
+            Mode::Libtest => libtest_stamp(build, &compiler, target),
+            Mode::Librustc => librustc_stamp(build, &compiler, target),
+            _ => panic!(),
+        };
+        let out_dir = build.cargo_out(&compiler, Mode::Tool, target);
+        build.clear_if_dirty(&out_dir, &stamp);
+    }
+}
+
+// rules.build("tool-rustbook", "src/tools/rustbook")
+//      .dep(|s| s.name("maybe-clean-tools"))
+//      .dep(|s| s.name("librustc-tool"))
+//      .run(move |s| compile::tool(build, s.stage, s.target, "rustbook"));
+// rules.build("tool-error-index", "src/tools/error_index_generator")
+//      .dep(|s| s.name("maybe-clean-tools"))
+//      .dep(|s| s.name("librustc-tool"))
+//      .run(move |s| compile::tool(build, s.stage, s.target, "error_index_generator"));
+// rules.build("tool-unstable-book-gen", "src/tools/unstable-book-gen")
+//      .dep(|s| s.name("maybe-clean-tools"))
+//      .dep(|s| s.name("libstd-tool"))
+//      .run(move |s| compile::tool(build, s.stage, s.target, "unstable-book-gen"));
+// rules.build("tool-tidy", "src/tools/tidy")
+//      .dep(|s| s.name("maybe-clean-tools"))
+//      .dep(|s| s.name("libstd-tool"))
+//      .run(move |s| compile::tool(build, s.stage, s.target, "tidy"));
+// rules.build("tool-linkchecker", "src/tools/linkchecker")
+//      .dep(|s| s.name("maybe-clean-tools"))
+//      .dep(|s| s.name("libstd-tool"))
+//      .run(move |s| compile::tool(build, s.stage, s.target, "linkchecker"));
+// rules.build("tool-cargotest", "src/tools/cargotest")
+//      .dep(|s| s.name("maybe-clean-tools"))
+//      .dep(|s| s.name("libstd-tool"))
+//      .run(move |s| compile::tool(build, s.stage, s.target, "cargotest"));
+// rules.build("tool-compiletest", "src/tools/compiletest")
+//      .dep(|s| s.name("maybe-clean-tools"))
+//      .dep(|s| s.name("libtest-tool"))
+//      .run(move |s| compile::tool(build, s.stage, s.target, "compiletest"));
+// rules.build("tool-build-manifest", "src/tools/build-manifest")
+//      .dep(|s| s.name("maybe-clean-tools"))
+//      .dep(|s| s.name("libstd-tool"))
+//      .run(move |s| compile::tool(build, s.stage, s.target, "build-manifest"));
+// rules.build("tool-remote-test-server", "src/tools/remote-test-server")
+//      .dep(|s| s.name("maybe-clean-tools"))
+//      .dep(|s| s.name("libstd-tool"))
+//      .run(move |s| compile::tool(build, s.stage, s.target, "remote-test-server"));
+// rules.build("tool-remote-test-client", "src/tools/remote-test-client")
+//      .dep(|s| s.name("maybe-clean-tools"))
+//      .dep(|s| s.name("libstd-tool"))
+//      .run(move |s| compile::tool(build, s.stage, s.target, "remote-test-client"));
+// rules.build("tool-rust-installer", "src/tools/rust-installer")
+//      .dep(|s| s.name("maybe-clean-tools"))
+//      .dep(|s| s.name("libstd-tool"))
+//      .run(move |s| compile::tool(build, s.stage, s.target, "rust-installer"));
+// rules.build("tool-cargo", "src/tools/cargo")
+//      .host(true)
+//      .default(build.config.extended)
+//      .dep(|s| s.name("maybe-clean-tools"))
+//      .dep(|s| s.name("libstd-tool"))
+//      .dep(|s| s.stage(0).host(s.target).name("openssl"))
+//      .dep(move |s| {
+//          // Cargo depends on procedural macros, which requires a full host
+//          // compiler to be available, so we need to depend on that.
+//          s.name("librustc-link")
+//           .target(&build.build)
+//           .host(&build.build)
+//      })
+//      .run(move |s| compile::tool(build, s.stage, s.target, "cargo"));
+// rules.build("tool-rls", "src/tools/rls")
+//      .host(true)
+//      .default(build.config.extended)
+//      .dep(|s| s.name("librustc-tool"))
+//      .dep(|s| s.stage(0).host(s.target).name("openssl"))
+//      .dep(move |s| {
+//          // rls, like cargo, uses procedural macros
+//          s.name("librustc-link")
+//           .target(&build.build)
+//           .host(&build.build)
+//      })
+//      .run(move |s| compile::tool(build, s.stage, s.target, "rls"));
+//
+
+#[derive(Serialize)]
+pub struct Tool<'a> {
+    pub stage: u32,
+    pub target: &'a str,
+    pub tool: &'a str,
+}
+
+impl<'a> Step<'a> for Tool<'a> {
+    type Output = ();
+
+    /// Build a tool in `src/tools`
+    ///
+    /// This will build the specified tool with the specified `host` compiler in
+    /// `stage` into the normal cargo output directory.
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let stage = self.stage;
+        let target = self.target;
+        let tool = self.tool;
+
+        let _folder = build.fold_output(|| format!("stage{}-{}", stage, tool));
+        println!("Building stage{} tool {} ({})", stage, tool, target);
+
+        let compiler = Compiler::new(stage, &build.build);
+
+        let mut cargo = build.cargo(&compiler, Mode::Tool, target, "build");
+        let dir = build.src.join("src/tools").join(tool);
+        cargo.arg("--manifest-path").arg(dir.join("Cargo.toml"));
+
+        // We don't want to build tools dynamically as they'll be running across
+        // stages and such and it's just easier if they're not dynamically linked.
+        cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1");
+
+        if let Some(dir) = build.openssl_install_dir(target) {
+            cargo.env("OPENSSL_STATIC", "1");
+            cargo.env("OPENSSL_DIR", dir);
+            cargo.env("LIBZ_SYS_STATIC", "1");
+        }
+
+        cargo.env("CFG_RELEASE_CHANNEL", &build.config.channel);
+
+        let info = GitInfo::new(&dir);
+        if let Some(sha) = info.sha() {
+            cargo.env("CFG_COMMIT_HASH", sha);
+        }
+        if let Some(sha_short) = info.sha_short() {
+            cargo.env("CFG_SHORT_COMMIT_HASH", sha_short);
+        }
+        if let Some(date) = info.commit_date() {
+            cargo.env("CFG_COMMIT_DATE", date);
+        }
+
+        build.run(&mut cargo);
+    }
+}
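For a sense of how these step structs replace the old closures in the rules table, here is a hypothetical call site. It is illustrative only: the function name and target triple are invented, and the follow-up commits dispatch steps through the builder rather than by calling run directly.

// Hypothetical call site inside src/bootstrap; a field-for-argument translation
// of the old `compile::tool(build, s.stage, s.target, "linkchecker")` closure.
use builder::{Builder, Step};
use tool::Tool;

fn build_linkchecker(builder: &Builder) {
    Tool { stage: 0, target: "x86_64-unknown-linux-gnu", tool: "linkchecker" }.run(builder);
}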