TOOL_DEPS_rustc := rustc_driver
TOOL_DEPS_rustbook := std rustdoc
TOOL_DEPS_error_index_generator := rustdoc syntax serialize
-TOOL_SOURCE_compiletest := $(S)src/compiletest/compiletest.rs
+TOOL_SOURCE_compiletest := $(S)src/tools/compiletest/src/main.rs
TOOL_SOURCE_rustdoc := $(S)src/driver/driver.rs
TOOL_SOURCE_rustc := $(S)src/driver/driver.rs
TOOL_SOURCE_rustbook := $(S)src/tools/rustbook/main.rs
$(addprefix $(S)src/, \
bootstrap \
build_helper \
- compiletest \
doc \
driver \
etc \
--run-lib-path $$(TLIB$(1)_T_$(2)_H_$(3)) \
--rustc-path $$(HBIN$(1)_H_$(3))/rustc$$(X_$(3)) \
--rustdoc-path $$(HBIN$(1)_H_$(3))/rustdoc$$(X_$(3)) \
- --llvm-bin-path $(CFG_LLVM_INST_DIR_$(CFG_BUILD))/bin \
+ --llvm-filecheck $(CFG_LLVM_INST_DIR_$(CFG_BUILD))/bin/FileCheck \
--aux-base $$(S)src/test/auxiliary/ \
--stage-id stage$(1)-$(2) \
--target $(2) \
# Run the bootstrap
args = [os.path.join(rb.build_dir, "bootstrap/debug/bootstrap")]
-args.extend(sys.argv[1:])
args.append('--src')
args.append(rb.rust_root)
args.append('--build')
args.append(rb.build)
+args.extend(sys.argv[1:])
env = os.environ.copy()
env["BOOTSTRAP_PARENT_ID"] = str(os.getpid())
rb.run(args, env)
// except according to those terms.
use std::fs;
+use std::path::PathBuf;
use build::{Build, Compiler};
build.run(build.tool_cmd(&compiler, "tidy")
.arg(build.src.join("src")));
}
+
+/// Per-host directory under the build output root where compiletest
+/// suites place their build artifacts (`<out>/<host>/test`); used as
+/// the base for `--build-base` below.
+fn testdir(build: &Build, host: &str) -> PathBuf {
+ build.out.join(host).join("test")
+}
+
+/// Runs one compiletest suite.
+///
+/// `compiler` is the compiler under test, `target` the triple the tests
+/// are built for, `mode` the compiletest mode (e.g. "run-pass",
+/// "debuginfo-gdb"), and `suite` the directory name under `src/test`
+/// holding the test sources.
+pub fn compiletest(build: &Build,
+ compiler: &Compiler,
+ target: &str,
+ mode: &str,
+ suite: &str) {
+ let mut cmd = build.tool_cmd(compiler, "compiletest");
+
+ // Paths compiletest needs to find the compiler, the test sources,
+ // and the per-suite output directory.
+ cmd.arg("--compile-lib-path").arg(build.rustc_libdir(compiler));
+ cmd.arg("--run-lib-path").arg(build.sysroot_libdir(compiler, target));
+ cmd.arg("--rustc-path").arg(build.compiler_path(compiler));
+ cmd.arg("--rustdoc-path").arg(build.rustdoc(compiler));
+ cmd.arg("--src-base").arg(build.src.join("src/test").join(suite));
+ cmd.arg("--aux-base").arg(build.src.join("src/test/auxiliary"));
+ cmd.arg("--build-base").arg(testdir(build, compiler.host).join(suite));
+ cmd.arg("--stage-id").arg(format!("stage{}-{}", compiler.stage, target));
+ cmd.arg("--mode").arg(mode);
+ cmd.arg("--target").arg(target);
+ cmd.arg("--host").arg(compiler.host);
+ cmd.arg("--llvm-filecheck").arg(build.llvm_filecheck(&build.config.build));
+
+ // Tests that link native code pull in the rust_test_helpers static
+ // library built by native::test_helpers.
+ let linkflag = format!("-Lnative={}", build.test_helpers_out(target).display());
+ cmd.arg("--host-rustcflags").arg("-Crpath");
+ cmd.arg("--target-rustcflags").arg(format!("-Crpath {}", linkflag));
+
+ // FIXME: needs android support
+ cmd.arg("--android-cross-path").arg("");
+ // FIXME: CFG_PYTHON should probably be detected more robustly elsewhere
+ cmd.arg("--python").arg("python");
+
+ // Debugger information is probed at runtime (presumably during the
+ // sanity check that fills build.gdb_version et al. — see sanity.rs);
+ // forward it only when available.
+ if let Some(ref vers) = build.gdb_version {
+ cmd.arg("--gdb-version").arg(vers);
+ }
+ if let Some(ref vers) = build.lldb_version {
+ cmd.arg("--lldb-version").arg(vers);
+ }
+ if let Some(ref dir) = build.lldb_python_dir {
+ cmd.arg("--lldb-python-dir").arg(dir);
+ }
+
+ // Forward any free command-line arguments straight to compiletest
+ // (NOTE(review): presumably test-name filters — confirm against
+ // compiletest's argument parsing).
+ cmd.args(&build.flags.args);
+
+ if build.config.verbose || build.flags.verbose {
+ cmd.arg("--verbose");
+ }
+
+ build.run(&mut cmd);
+}
rm_rf(build, &out.join(format!("stage{}", stage)));
rm_rf(build, &out.join(format!("stage{}-std", stage)));
rm_rf(build, &out.join(format!("stage{}-rustc", stage)));
- rm_rf(build, &out.join(format!("stage{}-test", stage)));
rm_rf(build, &out.join(format!("stage{}-tools", stage)));
+ rm_rf(build, &out.join(format!("stage{}-test", stage)));
}
}
}
if !build.unstable_features {
cargo.env("CFG_DISABLE_UNSTABLE_FEATURES", "1");
}
- let target_config = build.config.target_config.get(target);
- if let Some(ref s) = target_config.and_then(|c| c.llvm_config.as_ref()) {
- cargo.env("LLVM_CONFIG", s);
- } else {
- let llvm_config = build.llvm_out(&build.config.build).join("bin")
- .join(exe("llvm-config", target));
- cargo.env("LLVM_CONFIG", llvm_config);
- }
+ cargo.env("LLVM_CONFIG", build.llvm_config(target));
if build.config.llvm_static_stdcpp {
cargo.env("LLVM_STATIC_STDCPP",
compiler_file(build.cxx(target), "libstdc++.a"));
cp_r(&build.src.join("man"), &image.join("share/man/man1"));
// Debugger scripts
- let cp_debugger_script = |file: &str| {
- let dst = image.join("lib/rustlib/etc");
- t!(fs::create_dir_all(&dst));
- install(&build.src.join("src/etc/").join(file), &dst, 0o644);
- };
- if host.contains("windows") {
- // no debugger scripts
- } else if host.contains("darwin") {
- // lldb debugger scripts
- install(&build.src.join("src/etc/rust-lldb"), &image.join("bin"),
- 0o755);
-
- cp_debugger_script("lldb_rust_formatters.py");
- cp_debugger_script("debugger_pretty_printers_common.py");
- } else {
- // gdb debugger scripts
- install(&build.src.join("src/etc/rust-gdb"), &image.join("bin"),
- 0o755);
-
- cp_debugger_script("gdb_load_rust_pretty_printers.py");
- cp_debugger_script("gdb_rust_pretty_printing.py");
- cp_debugger_script("debugger_pretty_printers_common.py");
- }
+ debugger_scripts(build, &image, host);
// Misc license info
let cp = |file: &str| {
}
}
+/// Installs the debugger wrapper scripts and pretty-printer Python
+/// modules into `sysroot` for `host`: nothing on Windows, the lldb set
+/// (rust-lldb) on Darwin, and the gdb set (rust-gdb) everywhere else.
+pub fn debugger_scripts(build: &Build,
+ sysroot: &Path,
+ host: &str) {
+ // Helper: copy one script from src/etc into <sysroot>/lib/rustlib/etc.
+ let cp_debugger_script = |file: &str| {
+ let dst = sysroot.join("lib/rustlib/etc");
+ t!(fs::create_dir_all(&dst));
+ install(&build.src.join("src/etc/").join(file), &dst, 0o644);
+ };
+ if host.contains("windows") {
+ // no debugger scripts
+ } else if host.contains("darwin") {
+ // lldb debugger scripts
+ install(&build.src.join("src/etc/rust-lldb"), &sysroot.join("bin"),
+ 0o755);
+
+ cp_debugger_script("lldb_rust_formatters.py");
+ cp_debugger_script("debugger_pretty_printers_common.py");
+ } else {
+ // gdb debugger scripts
+ install(&build.src.join("src/etc/rust-gdb"), &sysroot.join("bin"),
+ 0o755);
+
+ cp_debugger_script("gdb_load_rust_pretty_printers.py");
+ cp_debugger_script("gdb_rust_pretty_printing.py");
+ cp_debugger_script("debugger_pretty_printers_common.py");
+ }
+}
+
+
pub fn std(build: &Build, compiler: &Compiler, target: &str) {
println!("Dist std stage{} ({} -> {})", compiler.stage, compiler.host,
target);
usage(0);
}
- if m.free.len() > 0 {
- println!("free arguments are not currently accepted");
- usage(1);
- }
-
let cfg_file = m.opt_str("config").map(PathBuf::from).or_else(|| {
if fs::metadata("config.toml").is_ok() {
Some(PathBuf::from("config.toml"))
package_vers: String,
bootstrap_key: String,
+ // Probed tools at runtime
+ gdb_version: Option<String>,
+ lldb_version: Option<String>,
+ lldb_python_dir: Option<String>,
+
// Runtime state filled in later on
cc: HashMap<String, (gcc::Tool, PathBuf)>,
cxx: HashMap<String, gcc::Tool>,
cc: HashMap::new(),
cxx: HashMap::new(),
compiler_rt_built: RefCell::new(HashMap::new()),
+ gdb_version: None,
+ lldb_version: None,
+ lldb_python_dir: None,
}
}
CompilerRt { _dummy } => {
native::compiler_rt(self, target.target);
}
+ TestHelpers { _dummy } => {
+ native::test_helpers(self, target.target);
+ }
Libstd { compiler } => {
compile::std(self, target.target, &compiler);
}
ToolTidy { stage } => {
compile::tool(self, stage, target.target, "tidy");
}
+ ToolCompiletest { stage } => {
+ compile::tool(self, stage, target.target, "compiletest");
+ }
DocBook { stage } => {
doc::rustbook(self, stage, target.target, "book", &doc_out);
}
CheckTidy { stage } => {
check::tidy(self, stage, target.target);
}
+ CheckRPass { compiler } => {
+ check::compiletest(self, &compiler, target.target,
+ "run-pass", "run-pass");
+ }
+ CheckCFail { compiler } => {
+ check::compiletest(self, &compiler, target.target,
+ "compile-fail", "compile-fail");
+ }
+ CheckPFail { compiler } => {
+ check::compiletest(self, &compiler, target.target,
+ "parse-fail", "parse-fail");
+ }
+ CheckRFail { compiler } => {
+ check::compiletest(self, &compiler, target.target,
+ "run-fail", "run-fail");
+ }
+ CheckPretty { compiler } => {
+ check::compiletest(self, &compiler, target.target,
+ "pretty", "pretty");
+ }
+ CheckCodegen { compiler } => {
+ check::compiletest(self, &compiler, target.target,
+ "codegen", "codegen");
+ }
+ CheckCodegenUnits { compiler } => {
+ check::compiletest(self, &compiler, target.target,
+ "codegen-units", "codegen-units");
+ }
+ CheckDebuginfo { compiler } => {
+ if target.target.contains("msvc") ||
+ target.target.contains("android") {
+ // nothing to do
+ } else if target.target.contains("apple") {
+ check::compiletest(self, &compiler, target.target,
+ "debuginfo-lldb", "debuginfo");
+ } else {
+ check::compiletest(self, &compiler, target.target,
+ "debuginfo-gdb", "debuginfo");
+ }
+ }
+ CheckRustdoc { compiler } => {
+ check::compiletest(self, &compiler, target.target,
+ "rustdoc", "rustdoc");
+ }
+ CheckRPassValgrind { compiler } => {
+ check::compiletest(self, &compiler, target.target,
+ "run-pass-valgrind", "run-pass-valgrind");
+ }
+ CheckRPassFull { compiler } => {
+ check::compiletest(self, &compiler, target.target,
+ "run-pass", "run-pass-fulldeps");
+ }
+ CheckCFailFull { compiler } => {
+ check::compiletest(self, &compiler, target.target,
+ "compile-fail", "compile-fail-fulldeps")
+ }
DistDocs { stage } => dist::docs(self, stage, target.target),
DistMingw { _dummy } => dist::mingw(self, target.target),
DistRustc { stage } => dist::rustc(self, stage, target.target),
DistStd { compiler } => dist::std(self, &compiler, target.target),
+ DebuggerScripts { stage } => {
+ let compiler = Compiler::new(stage, target.target);
+ dist::debugger_scripts(self,
+ &self.sysroot(&compiler),
+ target.target);
+ }
+
Dist { .. } |
Doc { .. } | // pseudo-steps
Check { .. } => {}
self.cargo_out(compiler, Mode::Libstd, host).join("deps"),
self.cargo_out(compiler, Mode::Libtest, host).join("deps"),
self.cargo_out(compiler, Mode::Librustc, host).join("deps"),
+ self.cargo_out(compiler, Mode::Tool, host).join("deps"),
];
add_lib_path(paths, &mut cmd);
return cmd
let suffix = match mode {
Mode::Libstd => "-std",
Mode::Libtest => "-test",
- Mode::Tool | Mode::Librustc => "-rustc",
+ Mode::Tool => "-tools",
+ Mode::Librustc => "-rustc",
};
self.out.join(compiler.host)
.join(format!("stage{}{}", compiler.stage, suffix))
self.out.join(target).join("llvm")
}
+ /// Returns the path to `llvm-config` for the specified target.
+ ///
+ /// An externally-configured LLVM (an `llvm-config` path given in the
+ /// target's configuration) takes precedence; otherwise the binary
+ /// produced by our own LLVM build is returned.
+ fn llvm_config(&self, target: &str) -> PathBuf {
+ let target_config = self.config.target_config.get(target);
+ if let Some(s) = target_config.and_then(|c| c.llvm_config.as_ref()) {
+ s.clone()
+ } else {
+ self.llvm_out(&self.config.build).join("bin")
+ .join(exe("llvm-config", target))
+ }
+ }
+
+ /// Returns the path to the `FileCheck` binary for the specified target.
+ /// (NOTE(review): the original comment said `llvm-config` — a
+ /// copy-paste from the function above.)
+ fn llvm_filecheck(&self, target: &str) -> PathBuf {
+ let target_config = self.config.target_config.get(target);
+ if let Some(s) = target_config.and_then(|c| c.llvm_config.as_ref()) {
+ // Externally-configured LLVM: assume FileCheck sits next to
+ // the provided llvm-config binary.
+ s.parent().unwrap().join(exe("FileCheck", target))
+ } else {
+ let base = self.llvm_out(&self.config.build).join("build");
+ let exe = exe("FileCheck", target);
+ // MSVC cmake builds place binaries under a configuration
+ // subdirectory (Release/bin) rather than bin directly.
+ if self.config.build.contains("msvc") {
+ base.join("Release/bin").join(exe)
+ } else {
+ base.join("bin").join(exe)
+ }
+ }
+ }
+
/// Root output directory for compiler-rt compiled for `target`
fn compiler_rt_out(&self, target: &str) -> PathBuf {
self.out.join(target).join("compiler-rt")
}
+ /// Root output directory for rust_test_helpers library compiled for
+ /// `target`. Built by `native::test_helpers` and handed to
+ /// compiletest via `-Lnative=...` in the target rustc flags.
+ fn test_helpers_out(&self, target: &str) -> PathBuf {
+ self.out.join(target).join("rust-test-helpers")
+ }
+
fn add_rustc_lib_path(&self, compiler: &Compiler, cmd: &mut Command) {
// Windows doesn't need dylib path munging because the dlls for the
// compiler live next to the compiler and the system will find them
}
fn cflags(&self, target: &str) -> Vec<String> {
+ // Filter out -O and /O (the optimization flags) that we picked up from
+ // gcc-rs because the build scripts will determine that for themselves.
let mut base = self.cc[target].0.args().iter()
.map(|s| s.to_string_lossy().into_owned())
+ .filter(|s| !s.starts_with("-O") && !s.starts_with("/O"))
.collect::<Vec<_>>();
// If we're compiling on OSX then we add a few unconditional flags
use build_helper::output;
use cmake;
+use gcc;
use build::Build;
-use build::util::{exe, staticlib};
+use build::util::{exe, staticlib, up_to_date};
pub fn llvm(build: &Build, target: &str) {
// If we're using a custom LLVM bail out here, but we can only use a
}
let _ = fs::remove_dir_all(&dst);
t!(fs::create_dir_all(&dst));
- let build_llvm_config = build.llvm_out(&build.config.build)
- .join("bin")
- .join(exe("llvm-config", &build.config.build));
+ let build_llvm_config = build.llvm_config(&build.config.build);
let mut cfg = cmake::Config::new(build.src.join("src/compiler-rt"));
cfg.target(target)
.host(&build.config.build)
.build_target(&build_target);
cfg.build();
}
+
+/// Compiles the C support library `rust_test_helpers.c` for `target`
+/// into `build.test_helpers_out(target)`; compiletest links tests
+/// against it via `-Lnative`.
+pub fn test_helpers(build: &Build, target: &str) {
+ let dst = build.test_helpers_out(target);
+ let src = build.src.join("src/rt/rust_test_helpers.c");
+ // Skip the (unconditional) rebuild when the archive is newer than
+ // the single C source file.
+ if up_to_date(&src, &dst.join("librust_test_helpers.a")) {
+ return
+ }
+
+ println!("Building test helpers");
+ t!(fs::create_dir_all(&dst));
+ let mut cfg = gcc::Config::new();
+ // cargo_metadata(false): we are not inside a build script, so don't
+ // print cargo:* directives to stdout.
+ // NOTE(review): `src` computed above is recomputed by the
+ // `.file(...)` call below; could reuse the variable.
+ cfg.cargo_metadata(false)
+ .out_dir(&dst)
+ .target(target)
+ .host(&build.config.build)
+ .opt_level(0)
+ .debug(false)
+ .file(build.src.join("src/rt/rust_test_helpers.c"))
+ .compile("librust_test_helpers.a");
+}
need_cmd(build.cxx(host).as_ref());
}
+ // Externally configured LLVM requires FileCheck to exist
+ let filecheck = build.llvm_filecheck(&build.config.build);
+ if !filecheck.starts_with(&build.out) && !filecheck.exists() {
+ panic!("filecheck executable {:?} does not exist", filecheck);
+ }
+
for target in build.config.target.iter() {
// Either can't build or don't want to run jemalloc on these targets
if target.contains("rumprun") ||
target);
}
}
+
+ let run = |cmd: &mut Command| {
+ cmd.output().map(|output| {
+ String::from_utf8_lossy(&output.stdout)
+ .lines().next().unwrap()
+ .to_string()
+ })
+ };
+ build.gdb_version = run(Command::new("gdb").arg("--version")).ok();
+ build.lldb_version = run(Command::new("lldb").arg("--version")).ok();
+ if build.lldb_version.is_some() {
+ build.lldb_python_dir = run(Command::new("lldb").arg("-P")).ok();
+ }
}
(tool_error_index, ToolErrorIndex { stage: u32 }),
(tool_cargotest, ToolCargoTest { stage: u32 }),
(tool_tidy, ToolTidy { stage: u32 }),
+ (tool_compiletest, ToolCompiletest { stage: u32 }),
// Steps for long-running native builds. Ideally these wouldn't
// actually exist and would be part of build scripts, but for now
// with braces are unstable so we just pick something that works.
(llvm, Llvm { _dummy: () }),
(compiler_rt, CompilerRt { _dummy: () }),
+ (test_helpers, TestHelpers { _dummy: () }),
+ (debugger_scripts, DebuggerScripts { stage: u32 }),
// Steps for various pieces of documentation that we can generate,
// the 'doc' step is just a pseudo target to depend on a bunch of
(check_linkcheck, CheckLinkcheck { stage: u32 }),
(check_cargotest, CheckCargoTest { stage: u32 }),
(check_tidy, CheckTidy { stage: u32 }),
+ (check_rpass, CheckRPass { compiler: Compiler<'a> }),
+ (check_rfail, CheckRFail { compiler: Compiler<'a> }),
+ (check_cfail, CheckCFail { compiler: Compiler<'a> }),
+ (check_pfail, CheckPFail { compiler: Compiler<'a> }),
+ (check_codegen, CheckCodegen { compiler: Compiler<'a> }),
+ (check_codegen_units, CheckCodegenUnits { compiler: Compiler<'a> }),
+ (check_debuginfo, CheckDebuginfo { compiler: Compiler<'a> }),
+ (check_rustdoc, CheckRustdoc { compiler: Compiler<'a> }),
+ (check_pretty, CheckPretty { compiler: Compiler<'a> }),
+ (check_rpass_valgrind, CheckRPassValgrind { compiler: Compiler<'a> }),
+ (check_rpass_full, CheckRPassFull { compiler: Compiler<'a> }),
+ (check_cfail_full, CheckCFailFull { compiler: Compiler<'a> }),
// Distribution targets, creating tarballs
(dist, Dist { stage: u32 }),
vec![self.llvm(()).target(&build.config.build)]
}
Source::Llvm { _dummy } => Vec::new(),
+ Source::TestHelpers { _dummy } => Vec::new(),
+ Source::DebuggerScripts { stage: _ } => Vec::new(),
// Note that all doc targets depend on artifacts from the build
// architecture, not the target (which is where we're generating
self.doc_std(stage),
self.doc_error_index(stage)]
}
- Source::Check { stage, compiler: _ } => {
- vec![self.check_linkcheck(stage),
- self.dist(stage)]
+ Source::Check { stage, compiler } => {
+ vec![
+ self.check_rpass(compiler),
+ self.check_cfail(compiler),
+ self.check_rfail(compiler),
+ self.check_pfail(compiler),
+ self.check_codegen(compiler),
+ self.check_codegen_units(compiler),
+ self.check_debuginfo(compiler),
+ self.check_rustdoc(compiler),
+ self.check_pretty(compiler),
+ self.check_rpass_valgrind(compiler),
+ self.check_rpass_full(compiler),
+ self.check_cfail_full(compiler),
+ self.check_linkcheck(stage),
+ self.dist(stage),
+ ]
}
Source::CheckLinkcheck { stage } => {
vec![self.tool_linkchecker(stage), self.doc(stage)]
Source::CheckTidy { stage } => {
vec![self.tool_tidy(stage)]
}
+ Source::CheckRFail { compiler } |
+ Source::CheckPFail { compiler } |
+ Source::CheckCodegen { compiler } |
+ Source::CheckCodegenUnits { compiler } |
+ Source::CheckRustdoc { compiler } |
+ Source::CheckPretty { compiler } |
+ Source::CheckCFail { compiler } |
+ Source::CheckRPassValgrind { compiler } |
+ Source::CheckRPass { compiler } => {
+ vec![
+ self.libtest(compiler),
+ self.tool_compiletest(compiler.stage),
+ self.test_helpers(()),
+ ]
+ }
+ Source::CheckDebuginfo { compiler } => {
+ vec![
+ self.libtest(compiler),
+ self.tool_compiletest(compiler.stage),
+ self.test_helpers(()),
+ self.debugger_scripts(compiler.stage),
+ ]
+ }
+ Source::CheckRPassFull { compiler } |
+ Source::CheckCFailFull { compiler } => {
+ vec![self.librustc(compiler),
+ self.tool_compiletest(compiler.stage)]
+ }
Source::ToolLinkchecker { stage } |
Source::ToolTidy { stage } => {
Source::ToolCargoTest { stage } => {
vec![self.libstd(self.compiler(stage))]
}
+ Source::ToolCompiletest { stage } => {
+ vec![self.libtest(self.compiler(stage))]
+ }
Source::DistDocs { stage } => vec![self.doc(stage)],
Source::DistMingw { _dummy: _ } => Vec::new(),
+++ /dev/null
-// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-pub use self::Mode::*;
-
-use std::fmt;
-use std::str::FromStr;
-use std::path::PathBuf;
-
-#[derive(Clone, Copy, PartialEq, Debug)]
-pub enum Mode {
- CompileFail,
- ParseFail,
- RunFail,
- RunPass,
- RunPassValgrind,
- Pretty,
- DebugInfoGdb,
- DebugInfoLldb,
- Codegen,
- Rustdoc,
- CodegenUnits,
- Incremental,
-}
-
-impl FromStr for Mode {
- type Err = ();
- fn from_str(s: &str) -> Result<Mode, ()> {
- match s {
- "compile-fail" => Ok(CompileFail),
- "parse-fail" => Ok(ParseFail),
- "run-fail" => Ok(RunFail),
- "run-pass" => Ok(RunPass),
- "run-pass-valgrind" => Ok(RunPassValgrind),
- "pretty" => Ok(Pretty),
- "debuginfo-lldb" => Ok(DebugInfoLldb),
- "debuginfo-gdb" => Ok(DebugInfoGdb),
- "codegen" => Ok(Codegen),
- "rustdoc" => Ok(Rustdoc),
- "codegen-units" => Ok(CodegenUnits),
- "incremental" => Ok(Incremental),
- _ => Err(()),
- }
- }
-}
-
-impl fmt::Display for Mode {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- fmt::Display::fmt(match *self {
- CompileFail => "compile-fail",
- ParseFail => "parse-fail",
- RunFail => "run-fail",
- RunPass => "run-pass",
- RunPassValgrind => "run-pass-valgrind",
- Pretty => "pretty",
- DebugInfoGdb => "debuginfo-gdb",
- DebugInfoLldb => "debuginfo-lldb",
- Codegen => "codegen",
- Rustdoc => "rustdoc",
- CodegenUnits => "codegen-units",
- Incremental => "incremental",
- }, f)
- }
-}
-
-#[derive(Clone)]
-pub struct Config {
- // The library paths required for running the compiler
- pub compile_lib_path: PathBuf,
-
- // The library paths required for running compiled programs
- pub run_lib_path: PathBuf,
-
- // The rustc executable
- pub rustc_path: PathBuf,
-
- // The rustdoc executable
- pub rustdoc_path: PathBuf,
-
- // The python executable
- pub python: String,
-
- // The llvm binaries path
- pub llvm_bin_path: Option<PathBuf>,
-
- // The valgrind path
- pub valgrind_path: Option<String>,
-
- // Whether to fail if we can't run run-pass-valgrind tests under valgrind
- // (or, alternatively, to silently run them like regular run-pass tests).
- pub force_valgrind: bool,
-
- // The directory containing the tests to run
- pub src_base: PathBuf,
-
- // The directory where programs should be built
- pub build_base: PathBuf,
-
- // Directory for auxiliary libraries
- pub aux_base: PathBuf,
-
- // The name of the stage being built (stage1, etc)
- pub stage_id: String,
-
- // The test mode, compile-fail, run-fail, run-pass
- pub mode: Mode,
-
- // Run ignored tests
- pub run_ignored: bool,
-
- // Only run tests that match this filter
- pub filter: Option<String>,
-
- // Write out a parseable log of tests that were run
- pub logfile: Option<PathBuf>,
-
- // A command line to prefix program execution with,
- // for running under valgrind
- pub runtool: Option<String>,
-
- // Flags to pass to the compiler when building for the host
- pub host_rustcflags: Option<String>,
-
- // Flags to pass to the compiler when building for the target
- pub target_rustcflags: Option<String>,
-
- // Target system to be tested
- pub target: String,
-
- // Host triple for the compiler being invoked
- pub host: String,
-
- // Version of GDB
- pub gdb_version: Option<String>,
-
- // Version of LLDB
- pub lldb_version: Option<String>,
-
- // Path to the android tools
- pub android_cross_path: PathBuf,
-
- // Extra parameter to run adb on arm-linux-androideabi
- pub adb_path: String,
-
- // Extra parameter to run test suite on arm-linux-androideabi
- pub adb_test_dir: String,
-
- // status whether android device available or not
- pub adb_device_status: bool,
-
- // the path containing LLDB's Python module
- pub lldb_python_dir: Option<String>,
-
- // Explain what's going on
- pub verbose: bool,
-
- // Print one character per test instead of one line
- pub quiet: bool,
-}
+++ /dev/null
-// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-#![crate_type = "bin"]
-
-#![feature(box_syntax)]
-#![feature(libc)]
-#![feature(rustc_private)]
-#![feature(test)]
-#![feature(question_mark)]
-
-#![deny(warnings)]
-
-extern crate libc;
-extern crate test;
-extern crate getopts;
-
-#[macro_use]
-extern crate log;
-
-use std::env;
-use std::fs;
-use std::io;
-use std::path::{Path, PathBuf};
-use getopts::{optopt, optflag, reqopt};
-use common::Config;
-use common::{Pretty, DebugInfoGdb, DebugInfoLldb};
-use test::TestPaths;
-use util::logv;
-
-pub mod procsrv;
-pub mod util;
-pub mod header;
-pub mod runtest;
-pub mod common;
-pub mod errors;
-mod raise_fd_limit;
-
-pub fn main() {
- let config = parse_config(env::args().collect());
-
- if config.valgrind_path.is_none() && config.force_valgrind {
- panic!("Can't find Valgrind to run Valgrind tests");
- }
-
- log_config(&config);
- run_tests(&config);
-}
-
-pub fn parse_config(args: Vec<String> ) -> Config {
-
- let groups : Vec<getopts::OptGroup> =
- vec!(reqopt("", "compile-lib-path", "path to host shared libraries", "PATH"),
- reqopt("", "run-lib-path", "path to target shared libraries", "PATH"),
- reqopt("", "rustc-path", "path to rustc to use for compiling", "PATH"),
- reqopt("", "rustdoc-path", "path to rustdoc to use for compiling", "PATH"),
- reqopt("", "python", "path to python to use for doc tests", "PATH"),
- optopt("", "valgrind-path", "path to Valgrind executable for Valgrind tests", "PROGRAM"),
- optflag("", "force-valgrind", "fail if Valgrind tests cannot be run under Valgrind"),
- optopt("", "llvm-bin-path", "path to directory holding llvm binaries", "DIR"),
- reqopt("", "src-base", "directory to scan for test files", "PATH"),
- reqopt("", "build-base", "directory to deposit test outputs", "PATH"),
- reqopt("", "aux-base", "directory to find auxiliary test files", "PATH"),
- reqopt("", "stage-id", "the target-stage identifier", "stageN-TARGET"),
- reqopt("", "mode", "which sort of compile tests to run",
- "(compile-fail|parse-fail|run-fail|run-pass|\
- run-pass-valgrind|pretty|debug-info|incremental)"),
- optflag("", "ignored", "run tests marked as ignored"),
- optopt("", "runtool", "supervisor program to run tests under \
- (eg. emulator, valgrind)", "PROGRAM"),
- optopt("", "host-rustcflags", "flags to pass to rustc for host", "FLAGS"),
- optopt("", "target-rustcflags", "flags to pass to rustc for target", "FLAGS"),
- optflag("", "verbose", "run tests verbosely, showing all output"),
- optflag("", "quiet", "print one character per test instead of one line"),
- optopt("", "logfile", "file to log test execution to", "FILE"),
- optopt("", "target", "the target to build for", "TARGET"),
- optopt("", "host", "the host to build for", "HOST"),
- optopt("", "gdb-version", "the version of GDB used", "VERSION STRING"),
- optopt("", "lldb-version", "the version of LLDB used", "VERSION STRING"),
- optopt("", "android-cross-path", "Android NDK standalone path", "PATH"),
- optopt("", "adb-path", "path to the android debugger", "PATH"),
- optopt("", "adb-test-dir", "path to tests for the android debugger", "PATH"),
- optopt("", "lldb-python-dir", "directory containing LLDB's python module", "PATH"),
- optflag("h", "help", "show this message"));
-
- let (argv0, args_) = args.split_first().unwrap();
- if args.len() == 1 || args[1] == "-h" || args[1] == "--help" {
- let message = format!("Usage: {} [OPTIONS] [TESTNAME...]", argv0);
- println!("{}", getopts::usage(&message, &groups));
- println!("");
- panic!()
- }
-
- let matches =
- &match getopts::getopts(args_, &groups) {
- Ok(m) => m,
- Err(f) => panic!("{:?}", f)
- };
-
- if matches.opt_present("h") || matches.opt_present("help") {
- let message = format!("Usage: {} [OPTIONS] [TESTNAME...]", argv0);
- println!("{}", getopts::usage(&message, &groups));
- println!("");
- panic!()
- }
-
- fn opt_path(m: &getopts::Matches, nm: &str) -> PathBuf {
- match m.opt_str(nm) {
- Some(s) => PathBuf::from(&s),
- None => panic!("no option (=path) found for {}", nm),
- }
- }
-
- fn make_absolute(path: PathBuf) -> PathBuf {
- if path.is_relative() {
- env::current_dir().unwrap().join(path)
- } else {
- path
- }
- }
-
- Config {
- compile_lib_path: make_absolute(opt_path(matches, "compile-lib-path")),
- run_lib_path: make_absolute(opt_path(matches, "run-lib-path")),
- rustc_path: opt_path(matches, "rustc-path"),
- rustdoc_path: opt_path(matches, "rustdoc-path"),
- python: matches.opt_str("python").unwrap(),
- valgrind_path: matches.opt_str("valgrind-path"),
- force_valgrind: matches.opt_present("force-valgrind"),
- llvm_bin_path: matches.opt_str("llvm-bin-path").map(|s| PathBuf::from(&s)),
- src_base: opt_path(matches, "src-base"),
- build_base: opt_path(matches, "build-base"),
- aux_base: opt_path(matches, "aux-base"),
- stage_id: matches.opt_str("stage-id").unwrap(),
- mode: matches.opt_str("mode").unwrap().parse().ok().expect("invalid mode"),
- run_ignored: matches.opt_present("ignored"),
- filter: matches.free.first().cloned(),
- logfile: matches.opt_str("logfile").map(|s| PathBuf::from(&s)),
- runtool: matches.opt_str("runtool"),
- host_rustcflags: matches.opt_str("host-rustcflags"),
- target_rustcflags: matches.opt_str("target-rustcflags"),
- target: opt_str2(matches.opt_str("target")),
- host: opt_str2(matches.opt_str("host")),
- gdb_version: extract_gdb_version(matches.opt_str("gdb-version")),
- lldb_version: extract_lldb_version(matches.opt_str("lldb-version")),
- android_cross_path: opt_path(matches, "android-cross-path"),
- adb_path: opt_str2(matches.opt_str("adb-path")),
- adb_test_dir: format!("{}/{}",
- opt_str2(matches.opt_str("adb-test-dir")),
- opt_str2(matches.opt_str("target"))),
- adb_device_status:
- opt_str2(matches.opt_str("target")).contains("android") &&
- "(none)" != opt_str2(matches.opt_str("adb-test-dir")) &&
- !opt_str2(matches.opt_str("adb-test-dir")).is_empty(),
- lldb_python_dir: matches.opt_str("lldb-python-dir"),
- verbose: matches.opt_present("verbose"),
- quiet: matches.opt_present("quiet"),
- }
-}
-
-pub fn log_config(config: &Config) {
- let c = config;
- logv(c, format!("configuration:"));
- logv(c, format!("compile_lib_path: {:?}", config.compile_lib_path));
- logv(c, format!("run_lib_path: {:?}", config.run_lib_path));
- logv(c, format!("rustc_path: {:?}", config.rustc_path.display()));
- logv(c, format!("rustdoc_path: {:?}", config.rustdoc_path.display()));
- logv(c, format!("src_base: {:?}", config.src_base.display()));
- logv(c, format!("build_base: {:?}", config.build_base.display()));
- logv(c, format!("stage_id: {}", config.stage_id));
- logv(c, format!("mode: {}", config.mode));
- logv(c, format!("run_ignored: {}", config.run_ignored));
- logv(c, format!("filter: {}",
- opt_str(&config.filter
- .as_ref()
- .map(|re| re.to_owned()))));
- logv(c, format!("runtool: {}", opt_str(&config.runtool)));
- logv(c, format!("host-rustcflags: {}",
- opt_str(&config.host_rustcflags)));
- logv(c, format!("target-rustcflags: {}",
- opt_str(&config.target_rustcflags)));
- logv(c, format!("target: {}", config.target));
- logv(c, format!("host: {}", config.host));
- logv(c, format!("android-cross-path: {:?}",
- config.android_cross_path.display()));
- logv(c, format!("adb_path: {:?}", config.adb_path));
- logv(c, format!("adb_test_dir: {:?}", config.adb_test_dir));
- logv(c, format!("adb_device_status: {}",
- config.adb_device_status));
- logv(c, format!("verbose: {}", config.verbose));
- logv(c, format!("quiet: {}", config.quiet));
- logv(c, format!("\n"));
-}
-
-pub fn opt_str<'a>(maybestr: &'a Option<String>) -> &'a str {
- match *maybestr {
- None => "(none)",
- Some(ref s) => s,
- }
-}
-
-pub fn opt_str2(maybestr: Option<String>) -> String {
- match maybestr {
- None => "(none)".to_owned(),
- Some(s) => s,
- }
-}
-
-pub fn run_tests(config: &Config) {
- if config.target.contains("android") {
- if let DebugInfoGdb = config.mode {
- println!("{} debug-info test uses tcp 5039 port.\
- please reserve it", config.target);
- }
-
- // android debug-info test uses remote debugger
- // so, we test 1 thread at once.
- // also trying to isolate problems with adb_run_wrapper.sh ilooping
- env::set_var("RUST_TEST_THREADS","1");
- }
-
- match config.mode {
- DebugInfoLldb => {
- // Some older versions of LLDB seem to have problems with multiple
- // instances running in parallel, so only run one test thread at a
- // time.
- env::set_var("RUST_TEST_THREADS", "1");
- }
- _ => { /* proceed */ }
- }
-
- let opts = test_opts(config);
- let tests = make_tests(config);
- // sadly osx needs some file descriptor limits raised for running tests in
- // parallel (especially when we have lots and lots of child processes).
- // For context, see #8904
- unsafe { raise_fd_limit::raise_fd_limit(); }
- // Prevent issue #21352 UAC blocking .exe containing 'patch' etc. on Windows
- // If #11207 is resolved (adding manifest to .exe) this becomes unnecessary
- env::set_var("__COMPAT_LAYER", "RunAsInvoker");
- let res = test::run_tests_console(&opts, tests.into_iter().collect());
- match res {
- Ok(true) => {}
- Ok(false) => panic!("Some tests failed"),
- Err(e) => {
- println!("I/O failure during tests: {:?}", e);
- }
- }
-}
-
-pub fn test_opts(config: &Config) -> test::TestOpts {
- test::TestOpts {
- filter: config.filter.clone(),
- run_ignored: config.run_ignored,
- quiet: config.quiet,
- logfile: config.logfile.clone(),
- run_tests: true,
- bench_benchmarks: true,
- nocapture: match env::var("RUST_TEST_NOCAPTURE") {
- Ok(val) => &val != "0",
- Err(_) => false
- },
- color: test::AutoColor,
- }
-}
-
-pub fn make_tests(config: &Config) -> Vec<test::TestDescAndFn> {
- debug!("making tests from {:?}",
- config.src_base.display());
- let mut tests = Vec::new();
- collect_tests_from_dir(config,
- &config.src_base,
- &config.src_base,
- &PathBuf::new(),
- &mut tests)
- .unwrap();
- tests
-}
-
-fn collect_tests_from_dir(config: &Config,
- base: &Path,
- dir: &Path,
- relative_dir_path: &Path,
- tests: &mut Vec<test::TestDescAndFn>)
- -> io::Result<()> {
- // Ignore directories that contain a file
- // `compiletest-ignore-dir`.
- for file in fs::read_dir(dir)? {
- let file = file?;
- if file.file_name() == *"compiletest-ignore-dir" {
- return Ok(());
- }
- }
-
- let dirs = fs::read_dir(dir)?;
- for file in dirs {
- let file = file?;
- let file_path = file.path();
- debug!("inspecting file {:?}", file_path.display());
- if is_test(config, &file_path) {
- // If we find a test foo/bar.rs, we have to build the
- // output directory `$build/foo` so we can write
- // `$build/foo/bar` into it. We do this *now* in this
- // sequential loop because otherwise, if we do it in the
- // tests themselves, they race for the privilege of
- // creating the directories and sometimes fail randomly.
- let build_dir = config.build_base.join(&relative_dir_path);
- fs::create_dir_all(&build_dir).unwrap();
-
- let paths = TestPaths {
- file: file_path,
- base: base.to_path_buf(),
- relative_dir: relative_dir_path.to_path_buf(),
- };
- tests.push(make_test(config, &paths))
- } else if file_path.is_dir() {
- let relative_file_path = relative_dir_path.join(file.file_name());
- collect_tests_from_dir(config,
- base,
- &file_path,
- &relative_file_path,
- tests)?;
- }
- }
- Ok(())
-}
-
-pub fn is_test(config: &Config, testfile: &Path) -> bool {
- // Pretty-printer does not work with .rc files yet
- let valid_extensions =
- match config.mode {
- Pretty => vec!(".rs".to_owned()),
- _ => vec!(".rc".to_owned(), ".rs".to_owned())
- };
- let invalid_prefixes = vec!(".".to_owned(), "#".to_owned(), "~".to_owned());
- let name = testfile.file_name().unwrap().to_str().unwrap();
-
- let mut valid = false;
-
- for ext in &valid_extensions {
- if name.ends_with(ext) {
- valid = true;
- }
- }
-
- for pre in &invalid_prefixes {
- if name.starts_with(pre) {
- valid = false;
- }
- }
-
- return valid;
-}
-
-pub fn make_test(config: &Config, testpaths: &TestPaths) -> test::TestDescAndFn {
- let early_props = header::early_props(config, &testpaths.file);
-
- // The `should-fail` annotation doesn't apply to pretty tests,
- // since we run the pretty printer across all tests by default.
- // If desired, we could add a `should-fail-pretty` annotation.
- let should_panic = match config.mode {
- Pretty => test::ShouldPanic::No,
- _ => if early_props.should_fail {
- test::ShouldPanic::Yes
- } else {
- test::ShouldPanic::No
- }
- };
-
- test::TestDescAndFn {
- desc: test::TestDesc {
- name: make_test_name(config, testpaths),
- ignore: early_props.ignore,
- should_panic: should_panic,
- },
- testfn: make_test_closure(config, testpaths),
- }
-}
-
-pub fn make_test_name(config: &Config, testpaths: &TestPaths) -> test::TestName {
- // Convert a complete path to something like
- //
- // run-pass/foo/bar/baz.rs
- let path =
- PathBuf::from(config.mode.to_string())
- .join(&testpaths.relative_dir)
- .join(&testpaths.file.file_name().unwrap());
- test::DynTestName(format!("[{}] {}", config.mode, path.display()))
-}
-
-pub fn make_test_closure(config: &Config, testpaths: &TestPaths) -> test::TestFn {
- let config = config.clone();
- let testpaths = testpaths.clone();
- test::DynTestFn(Box::new(move || {
- runtest::run(config, &testpaths)
- }))
-}
-
-fn extract_gdb_version(full_version_line: Option<String>) -> Option<String> {
- match full_version_line {
- Some(ref full_version_line)
- if !full_version_line.trim().is_empty() => {
- let full_version_line = full_version_line.trim();
-
- // used to be a regex "(^|[^0-9])([0-9]\.[0-9]+)"
- for (pos, c) in full_version_line.char_indices() {
- if !c.is_digit(10) {
- continue
- }
- if pos + 2 >= full_version_line.len() {
- continue
- }
- if full_version_line[pos + 1..].chars().next().unwrap() != '.' {
- continue
- }
- if !full_version_line[pos + 2..].chars().next().unwrap().is_digit(10) {
- continue
- }
- if pos > 0 && full_version_line[..pos].chars().next_back()
- .unwrap().is_digit(10) {
- continue
- }
- let mut end = pos + 3;
- while end < full_version_line.len() &&
- full_version_line[end..].chars().next()
- .unwrap().is_digit(10) {
- end += 1;
- }
- return Some(full_version_line[pos..end].to_owned());
- }
- println!("Could not extract GDB version from line '{}'",
- full_version_line);
- None
- },
- _ => None
- }
-}
-
-fn extract_lldb_version(full_version_line: Option<String>) -> Option<String> {
- // Extract the major LLDB version from the given version string.
- // LLDB version strings are different for Apple and non-Apple platforms.
- // At the moment, this function only supports the Apple variant, which looks
- // like this:
- //
- // LLDB-179.5 (older versions)
- // lldb-300.2.51 (new versions)
- //
- // We are only interested in the major version number, so this function
- // will return `Some("179")` and `Some("300")` respectively.
-
- if let Some(ref full_version_line) = full_version_line {
- if !full_version_line.trim().is_empty() {
- let full_version_line = full_version_line.trim();
-
- for (pos, l) in full_version_line.char_indices() {
- if l != 'l' && l != 'L' { continue }
- if pos + 5 >= full_version_line.len() { continue }
- let l = full_version_line[pos + 1..].chars().next().unwrap();
- if l != 'l' && l != 'L' { continue }
- let d = full_version_line[pos + 2..].chars().next().unwrap();
- if d != 'd' && d != 'D' { continue }
- let b = full_version_line[pos + 3..].chars().next().unwrap();
- if b != 'b' && b != 'B' { continue }
- let dash = full_version_line[pos + 4..].chars().next().unwrap();
- if dash != '-' { continue }
-
- let vers = full_version_line[pos + 5..].chars().take_while(|c| {
- c.is_digit(10)
- }).collect::<String>();
- if !vers.is_empty() { return Some(vers) }
- }
- println!("Could not extract LLDB version from line '{}'",
- full_version_line);
- }
- }
- None
-}
+++ /dev/null
-// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-use self::WhichLine::*;
-
-use std::fmt;
-use std::fs::File;
-use std::io::BufReader;
-use std::io::prelude::*;
-use std::path::Path;
-use std::str::FromStr;
-
-#[derive(Clone, Debug, PartialEq)]
-pub enum ErrorKind {
- Help,
- Error,
- Note,
- Suggestion,
- Warning,
-}
-
-impl FromStr for ErrorKind {
- type Err = ();
- fn from_str(s: &str) -> Result<Self, Self::Err> {
- match &s.trim_right_matches(':') as &str {
- "HELP" => Ok(ErrorKind::Help),
- "ERROR" => Ok(ErrorKind::Error),
- "NOTE" => Ok(ErrorKind::Note),
- "SUGGESTION" => Ok(ErrorKind::Suggestion),
- "WARN" => Ok(ErrorKind::Warning),
- "WARNING" => Ok(ErrorKind::Warning),
- _ => Err(()),
- }
- }
-}
-
-impl fmt::Display for ErrorKind {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- match *self {
- ErrorKind::Help => write!(f, "help"),
- ErrorKind::Error => write!(f, "error"),
- ErrorKind::Note => write!(f, "note"),
- ErrorKind::Suggestion => write!(f, "suggestion"),
- ErrorKind::Warning => write!(f, "warning"),
- }
- }
-}
-
-pub struct ExpectedError {
- pub line_num: usize,
- /// What kind of message we expect (e.g. warning, error, suggestion).
- /// `None` if not specified or unknown message kind.
- pub kind: Option<ErrorKind>,
- pub msg: String,
-}
-
-#[derive(PartialEq, Debug)]
-enum WhichLine { ThisLine, FollowPrevious(usize), AdjustBackward(usize) }
-
-/// Looks for either "//~| KIND MESSAGE" or "//~^^... KIND MESSAGE"
-/// The former is a "follow" that inherits its target from the preceding line;
-/// the latter is an "adjusts" that goes that many lines up.
-///
-/// Goal is to enable tests both like: //~^^^ ERROR go up three
-/// and also //~^ ERROR message one for the preceding line, and
-/// //~| ERROR message two for that same line.
-///
-/// If cfg is not None (i.e., in an incremental test), then we look
-/// for `//[X]~` instead, where `X` is the current `cfg`.
-pub fn load_errors(testfile: &Path, cfg: Option<&str>) -> Vec<ExpectedError> {
- let rdr = BufReader::new(File::open(testfile).unwrap());
-
- // `last_nonfollow_error` tracks the most recently seen
- // line with an error template that did not use the
- // follow-syntax, "//~| ...".
- //
- // (pnkfelix could not find an easy way to compose Iterator::scan
- // and Iterator::filter_map to pass along this information into
- // `parse_expected`. So instead I am storing that state here and
- // updating it in the map callback below.)
- let mut last_nonfollow_error = None;
-
- let tag = match cfg {
- Some(rev) => format!("//[{}]~", rev),
- None => format!("//~")
- };
-
- rdr.lines()
- .enumerate()
- .filter_map(|(line_num, line)| {
- parse_expected(last_nonfollow_error,
- line_num + 1,
- &line.unwrap(),
- &tag)
- .map(|(which, error)| {
- match which {
- FollowPrevious(_) => {}
- _ => last_nonfollow_error = Some(error.line_num),
- }
- error
- })
- })
- .collect()
-}
-
-fn parse_expected(last_nonfollow_error: Option<usize>,
- line_num: usize,
- line: &str,
- tag: &str)
- -> Option<(WhichLine, ExpectedError)> {
- let start = match line.find(tag) { Some(i) => i, None => return None };
- let (follow, adjusts) = if line[start + tag.len()..].chars().next().unwrap() == '|' {
- (true, 0)
- } else {
- (false, line[start + tag.len()..].chars().take_while(|c| *c == '^').count())
- };
- let kind_start = start + tag.len() + adjusts + (follow as usize);
- let kind = line[kind_start..].split_whitespace()
- .next()
- .expect("Encountered unexpected empty comment")
- .parse::<ErrorKind>()
- .ok();
- let letters = line[kind_start..].chars();
- let msg = letters.skip_while(|c| c.is_whitespace())
- .skip_while(|c| !c.is_whitespace())
- .collect::<String>().trim().to_owned();
-
- let (which, line_num) = if follow {
- assert!(adjusts == 0, "use either //~| or //~^, not both.");
- let line_num = last_nonfollow_error.expect("encountered //~| without \
- preceding //~^ line.");
- (FollowPrevious(line_num), line_num)
- } else {
- let which =
- if adjusts > 0 { AdjustBackward(adjusts) } else { ThisLine };
- let line_num = line_num - adjusts;
- (which, line_num)
- };
-
- debug!("line={} tag={:?} which={:?} kind={:?} msg={:?}",
- line_num, tag, which, kind, msg);
- Some((which, ExpectedError { line_num: line_num,
- kind: kind,
- msg: msg, }))
-}
+++ /dev/null
-// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-use std::env;
-use std::fs::File;
-use std::io::BufReader;
-use std::io::prelude::*;
-use std::path::{Path, PathBuf};
-
-use common::Config;
-use common;
-use util;
-
-#[derive(Clone, Debug)]
-pub struct TestProps {
- // Lines that should be expected, in order, on standard out
- pub error_patterns: Vec<String> ,
- // Extra flags to pass to the compiler
- pub compile_flags: Vec<String>,
- // Extra flags to pass when the compiled code is run (such as --bench)
- pub run_flags: Option<String>,
- // If present, the name of a file that this test should match when
- // pretty-printed
- pub pp_exact: Option<PathBuf>,
- // Modules from aux directory that should be compiled
- pub aux_builds: Vec<String> ,
- // Environment settings to use for compiling
- pub rustc_env: Vec<(String,String)> ,
- // Environment settings to use during execution
- pub exec_env: Vec<(String,String)> ,
- // Lines to check if they appear in the expected debugger output
- pub check_lines: Vec<String> ,
- // Build documentation for all specified aux-builds as well
- pub build_aux_docs: bool,
- // Flag to force a crate to be built with the host architecture
- pub force_host: bool,
- // Check stdout for error-pattern output as well as stderr
- pub check_stdout: bool,
- // Don't force a --crate-type=dylib flag on the command line
- pub no_prefer_dynamic: bool,
- // Run --pretty expanded when running pretty printing tests
- pub pretty_expanded: bool,
- // Which pretty mode are we testing with, default to 'normal'
- pub pretty_mode: String,
- // Only compare pretty output and don't try compiling
- pub pretty_compare_only: bool,
- // Patterns which must not appear in the output of a cfail test.
- pub forbid_output: Vec<String>,
- // Revisions to test for incremental compilation.
- pub revisions: Vec<String>,
-}
-
-// Load any test directives embedded in the file
-pub fn load_props(testfile: &Path) -> TestProps {
- let error_patterns = Vec::new();
- let aux_builds = Vec::new();
- let exec_env = Vec::new();
- let run_flags = None;
- let pp_exact = None;
- let check_lines = Vec::new();
- let build_aux_docs = false;
- let force_host = false;
- let check_stdout = false;
- let no_prefer_dynamic = false;
- let pretty_expanded = false;
- let pretty_compare_only = false;
- let forbid_output = Vec::new();
- let mut props = TestProps {
- error_patterns: error_patterns,
- compile_flags: vec![],
- run_flags: run_flags,
- pp_exact: pp_exact,
- aux_builds: aux_builds,
- revisions: vec![],
- rustc_env: vec![],
- exec_env: exec_env,
- check_lines: check_lines,
- build_aux_docs: build_aux_docs,
- force_host: force_host,
- check_stdout: check_stdout,
- no_prefer_dynamic: no_prefer_dynamic,
- pretty_expanded: pretty_expanded,
- pretty_mode: format!("normal"),
- pretty_compare_only: pretty_compare_only,
- forbid_output: forbid_output,
- };
- load_props_into(&mut props, testfile, None);
- props
-}
-
-/// Load properties from `testfile` into `props`. If a property is
-/// tied to a particular revision `foo` (indicated by writing
-/// `//[foo]`), then the property is ignored unless `cfg` is
-/// `Some("foo")`.
-pub fn load_props_into(props: &mut TestProps, testfile: &Path, cfg: Option<&str>) {
- iter_header(testfile, cfg, &mut |ln| {
- if let Some(ep) = parse_error_pattern(ln) {
- props.error_patterns.push(ep);
- }
-
- if let Some(flags) = parse_compile_flags(ln) {
- props.compile_flags.extend(
- flags
- .split_whitespace()
- .map(|s| s.to_owned()));
- }
-
- if let Some(r) = parse_revisions(ln) {
- props.revisions.extend(r);
- }
-
- if props.run_flags.is_none() {
- props.run_flags = parse_run_flags(ln);
- }
-
- if props.pp_exact.is_none() {
- props.pp_exact = parse_pp_exact(ln, testfile);
- }
-
- if !props.build_aux_docs {
- props.build_aux_docs = parse_build_aux_docs(ln);
- }
-
- if !props.force_host {
- props.force_host = parse_force_host(ln);
- }
-
- if !props.check_stdout {
- props.check_stdout = parse_check_stdout(ln);
- }
-
- if !props.no_prefer_dynamic {
- props.no_prefer_dynamic = parse_no_prefer_dynamic(ln);
- }
-
- if !props.pretty_expanded {
- props.pretty_expanded = parse_pretty_expanded(ln);
- }
-
- if let Some(m) = parse_pretty_mode(ln) {
- props.pretty_mode = m;
- }
-
- if !props.pretty_compare_only {
- props.pretty_compare_only = parse_pretty_compare_only(ln);
- }
-
- if let Some(ab) = parse_aux_build(ln) {
- props.aux_builds.push(ab);
- }
-
- if let Some(ee) = parse_env(ln, "exec-env") {
- props.exec_env.push(ee);
- }
-
- if let Some(ee) = parse_env(ln, "rustc-env") {
- props.rustc_env.push(ee);
- }
-
- if let Some(cl) = parse_check_line(ln) {
- props.check_lines.push(cl);
- }
-
- if let Some(of) = parse_forbid_output(ln) {
- props.forbid_output.push(of);
- }
- });
-
- for key in vec!["RUST_TEST_NOCAPTURE", "RUST_TEST_THREADS"] {
- match env::var(key) {
- Ok(val) =>
- if props.exec_env.iter().find(|&&(ref x, _)| *x == key).is_none() {
- props.exec_env.push((key.to_owned(), val))
- },
- Err(..) => {}
- }
- }
-}
-
-pub struct EarlyProps {
- pub ignore: bool,
- pub should_fail: bool,
-}
-
-// scan the file to detect whether the test should be ignored and
-// whether it should panic; these are two things the test runner needs
-// to know early, before actually running the test
-pub fn early_props(config: &Config, testfile: &Path) -> EarlyProps {
- let mut props = EarlyProps {
- ignore: false,
- should_fail: false,
- };
-
- iter_header(testfile, None, &mut |ln| {
- props.ignore =
- props.ignore ||
- parse_name_directive(ln, "ignore-test") ||
- parse_name_directive(ln, &ignore_target(config)) ||
- parse_name_directive(ln, &ignore_architecture(config)) ||
- parse_name_directive(ln, &ignore_stage(config)) ||
- parse_name_directive(ln, &ignore_env(config)) ||
- (config.mode == common::Pretty &&
- parse_name_directive(ln, "ignore-pretty")) ||
- (config.target != config.host &&
- parse_name_directive(ln, "ignore-cross-compile")) ||
- ignore_gdb(config, ln) ||
- ignore_lldb(config, ln);
-
- props.should_fail =
- props.should_fail ||
- parse_name_directive(ln, "should-fail");
- });
-
- return props;
-
- fn ignore_target(config: &Config) -> String {
- format!("ignore-{}", util::get_os(&config.target))
- }
- fn ignore_architecture(config: &Config) -> String {
- format!("ignore-{}", util::get_arch(&config.target))
- }
- fn ignore_stage(config: &Config) -> String {
- format!("ignore-{}",
- config.stage_id.split('-').next().unwrap())
- }
- fn ignore_env(config: &Config) -> String {
- format!("ignore-{}", util::get_env(&config.target).unwrap_or("<unknown>"))
- }
- fn ignore_gdb(config: &Config, line: &str) -> bool {
- if config.mode != common::DebugInfoGdb {
- return false;
- }
-
- if parse_name_directive(line, "ignore-gdb") {
- return true;
- }
-
- if let Some(ref actual_version) = config.gdb_version {
- if line.contains("min-gdb-version") {
- let min_version = line.trim()
- .split(' ')
- .last()
- .expect("Malformed GDB version directive");
- // Ignore if actual version is smaller the minimum required
- // version
- gdb_version_to_int(actual_version) <
- gdb_version_to_int(min_version)
- } else {
- false
- }
- } else {
- false
- }
- }
-
- fn ignore_lldb(config: &Config, line: &str) -> bool {
- if config.mode != common::DebugInfoLldb {
- return false;
- }
-
- if parse_name_directive(line, "ignore-lldb") {
- return true;
- }
-
- if let Some(ref actual_version) = config.lldb_version {
- if line.contains("min-lldb-version") {
- let min_version = line.trim()
- .split(' ')
- .last()
- .expect("Malformed lldb version directive");
- // Ignore if actual version is smaller the minimum required
- // version
- lldb_version_to_int(actual_version) <
- lldb_version_to_int(min_version)
- } else {
- false
- }
- } else {
- false
- }
- }
-}
-
-fn iter_header(testfile: &Path,
- cfg: Option<&str>,
- it: &mut FnMut(&str)) {
- let rdr = BufReader::new(File::open(testfile).unwrap());
- for ln in rdr.lines() {
- // Assume that any directives will be found before the first
- // module or function. This doesn't seem to be an optimization
- // with a warm page cache. Maybe with a cold one.
- let ln = ln.unwrap();
- let ln = ln.trim();
- if ln.starts_with("fn") || ln.starts_with("mod") {
- return;
- } else if ln.starts_with("//[") {
- // A comment like `//[foo]` is specific to revision `foo`
- if let Some(close_brace) = ln.find("]") {
- let lncfg = &ln[3..close_brace];
- let matches = match cfg {
- Some(s) => s == &lncfg[..],
- None => false,
- };
- if matches {
- it(&ln[close_brace+1..]);
- }
- } else {
- panic!("malformed condition directive: expected `//[foo]`, found `{}`",
- ln)
- }
- } else if ln.starts_with("//") {
- it(&ln[2..]);
- }
- }
- return;
-}
-
-fn parse_error_pattern(line: &str) -> Option<String> {
- parse_name_value_directive(line, "error-pattern")
-}
-
-fn parse_forbid_output(line: &str) -> Option<String> {
- parse_name_value_directive(line, "forbid-output")
-}
-
-fn parse_aux_build(line: &str) -> Option<String> {
- parse_name_value_directive(line, "aux-build")
-}
-
-fn parse_compile_flags(line: &str) -> Option<String> {
- parse_name_value_directive(line, "compile-flags")
-}
-
-fn parse_revisions(line: &str) -> Option<Vec<String>> {
- parse_name_value_directive(line, "revisions")
- .map(|r| r.split_whitespace().map(|t| t.to_string()).collect())
-}
-
-fn parse_run_flags(line: &str) -> Option<String> {
- parse_name_value_directive(line, "run-flags")
-}
-
-fn parse_check_line(line: &str) -> Option<String> {
- parse_name_value_directive(line, "check")
-}
-
-fn parse_force_host(line: &str) -> bool {
- parse_name_directive(line, "force-host")
-}
-
-fn parse_build_aux_docs(line: &str) -> bool {
- parse_name_directive(line, "build-aux-docs")
-}
-
-fn parse_check_stdout(line: &str) -> bool {
- parse_name_directive(line, "check-stdout")
-}
-
-fn parse_no_prefer_dynamic(line: &str) -> bool {
- parse_name_directive(line, "no-prefer-dynamic")
-}
-
-fn parse_pretty_expanded(line: &str) -> bool {
- parse_name_directive(line, "pretty-expanded")
-}
-
-fn parse_pretty_mode(line: &str) -> Option<String> {
- parse_name_value_directive(line, "pretty-mode")
-}
-
-fn parse_pretty_compare_only(line: &str) -> bool {
- parse_name_directive(line, "pretty-compare-only")
-}
-
-fn parse_env(line: &str, name: &str) -> Option<(String, String)> {
- parse_name_value_directive(line, name).map(|nv| {
- // nv is either FOO or FOO=BAR
- let mut strs: Vec<String> = nv
- .splitn(2, '=')
- .map(str::to_owned)
- .collect();
-
- match strs.len() {
- 1 => (strs.pop().unwrap(), "".to_owned()),
- 2 => {
- let end = strs.pop().unwrap();
- (strs.pop().unwrap(), end)
- }
- n => panic!("Expected 1 or 2 strings, not {}", n)
- }
- })
-}
-
-fn parse_pp_exact(line: &str, testfile: &Path) -> Option<PathBuf> {
- if let Some(s) = parse_name_value_directive(line, "pp-exact") {
- Some(PathBuf::from(&s))
- } else {
- if parse_name_directive(line, "pp-exact") {
- testfile.file_name().map(PathBuf::from)
- } else {
- None
- }
- }
-}
-
-fn parse_name_directive(line: &str, directive: &str) -> bool {
- // This 'no-' rule is a quick hack to allow pretty-expanded and no-pretty-expanded to coexist
- line.contains(directive) && !line.contains(&("no-".to_owned() + directive))
-}
-
-pub fn parse_name_value_directive(line: &str, directive: &str)
- -> Option<String> {
- let keycolon = format!("{}:", directive);
- if let Some(colon) = line.find(&keycolon) {
- let value = line[(colon + keycolon.len()) .. line.len()].to_owned();
- debug!("{}: {}", directive, value);
- Some(value)
- } else {
- None
- }
-}
-
-pub fn gdb_version_to_int(version_string: &str) -> isize {
- let error_string = format!(
- "Encountered GDB version string with unexpected format: {}",
- version_string);
- let error_string = error_string;
-
- let components: Vec<&str> = version_string.trim().split('.').collect();
-
- if components.len() != 2 {
- panic!("{}", error_string);
- }
-
- let major: isize = components[0].parse().ok().expect(&error_string);
- let minor: isize = components[1].parse().ok().expect(&error_string);
-
- return major * 1000 + minor;
-}
-
-pub fn lldb_version_to_int(version_string: &str) -> isize {
- let error_string = format!(
- "Encountered LLDB version string with unexpected format: {}",
- version_string);
- let error_string = error_string;
- let major: isize = version_string.parse().ok().expect(&error_string);
- return major;
-}
+++ /dev/null
-// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-use std::env;
-use std::ffi::OsString;
-use std::io::prelude::*;
-use std::path::PathBuf;
-use std::process::{ExitStatus, Command, Child, Output, Stdio};
-
-fn add_target_env(cmd: &mut Command, lib_path: &str, aux_path: Option<&str>) {
- // Need to be sure to put both the lib_path and the aux path in the dylib
- // search path for the child.
- let var = if cfg!(windows) {
- "PATH"
- } else if cfg!(target_os = "macos") {
- "DYLD_LIBRARY_PATH"
- } else {
- "LD_LIBRARY_PATH"
- };
- let mut path = env::split_paths(&env::var_os(var).unwrap_or(OsString::new()))
- .collect::<Vec<_>>();
- if let Some(p) = aux_path {
- path.insert(0, PathBuf::from(p))
- }
- path.insert(0, PathBuf::from(lib_path));
-
- // Add the new dylib search path var
- let newpath = env::join_paths(&path).unwrap();
- cmd.env(var, newpath);
-}
-
-pub struct Result {pub status: ExitStatus, pub out: String, pub err: String}
-
-pub fn run(lib_path: &str,
- prog: &str,
- aux_path: Option<&str>,
- args: &[String],
- env: Vec<(String, String)> ,
- input: Option<String>) -> Option<Result> {
-
- let mut cmd = Command::new(prog);
- cmd.args(args)
- .stdin(Stdio::piped())
- .stdout(Stdio::piped())
- .stderr(Stdio::piped());
- add_target_env(&mut cmd, lib_path, aux_path);
- for (key, val) in env {
- cmd.env(&key, &val);
- }
-
- match cmd.spawn() {
- Ok(mut process) => {
- if let Some(input) = input {
- process.stdin.as_mut().unwrap().write_all(input.as_bytes()).unwrap();
- }
- let Output { status, stdout, stderr } =
- process.wait_with_output().unwrap();
-
- Some(Result {
- status: status,
- out: String::from_utf8(stdout).unwrap(),
- err: String::from_utf8(stderr).unwrap()
- })
- },
- Err(..) => None
- }
-}
-
-pub fn run_background(lib_path: &str,
- prog: &str,
- aux_path: Option<&str>,
- args: &[String],
- env: Vec<(String, String)> ,
- input: Option<String>) -> Option<Child> {
-
- let mut cmd = Command::new(prog);
- cmd.args(args)
- .stdin(Stdio::piped())
- .stdout(Stdio::piped())
- .stderr(Stdio::piped());
- add_target_env(&mut cmd, lib_path, aux_path);
- for (key, val) in env {
- cmd.env(&key, &val);
- }
-
- match cmd.spawn() {
- Ok(mut process) => {
- if let Some(input) = input {
- process.stdin.as_mut().unwrap().write_all(input.as_bytes()).unwrap();
- }
-
- Some(process)
- },
- Err(..) => None
- }
-}
+++ /dev/null
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-/// darwin_fd_limit exists to work around an issue where launchctl on Mac OS X
-/// defaults the rlimit maxfiles to 256/unlimited. The default soft limit of 256
-/// ends up being far too low for our multithreaded scheduler testing, depending
-/// on the number of cores available.
-///
-/// This fixes issue #7772.
-#[cfg(any(target_os = "macos", target_os = "ios"))]
-#[allow(non_camel_case_types)]
-pub unsafe fn raise_fd_limit() {
- use libc;
- use std::cmp;
- use std::io;
- use std::mem::size_of_val;
- use std::ptr::null_mut;
-
- static CTL_KERN: libc::c_int = 1;
- static KERN_MAXFILESPERPROC: libc::c_int = 29;
-
- // The strategy here is to fetch the current resource limits, read the
- // kern.maxfilesperproc sysctl value, and bump the soft resource limit for
- // maxfiles up to the sysctl value.
-
- // Fetch the kern.maxfilesperproc value
- let mut mib: [libc::c_int; 2] = [CTL_KERN, KERN_MAXFILESPERPROC];
- let mut maxfiles: libc::c_int = 0;
- let mut size: libc::size_t = size_of_val(&maxfiles) as libc::size_t;
- if libc::sysctl(&mut mib[0], 2, &mut maxfiles as *mut _ as *mut _, &mut size,
- null_mut(), 0) != 0 {
- let err = io::Error::last_os_error();
- panic!("raise_fd_limit: error calling sysctl: {}", err);
- }
-
- // Fetch the current resource limits
- let mut rlim = libc::rlimit{rlim_cur: 0, rlim_max: 0};
- if libc::getrlimit(libc::RLIMIT_NOFILE, &mut rlim) != 0 {
- let err = io::Error::last_os_error();
- panic!("raise_fd_limit: error calling getrlimit: {}", err);
- }
-
- // Bump the soft limit to the smaller of kern.maxfilesperproc and the hard
- // limit
- rlim.rlim_cur = cmp::min(maxfiles as libc::rlim_t, rlim.rlim_max);
-
- // Set our newly-increased resource limit
- if libc::setrlimit(libc::RLIMIT_NOFILE, &rlim) != 0 {
- let err = io::Error::last_os_error();
- panic!("raise_fd_limit: error calling setrlimit: {}", err);
- }
-}
-
-#[cfg(not(any(target_os = "macos", target_os = "ios")))]
-pub unsafe fn raise_fd_limit() {}
+++ /dev/null
-// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-use common::Config;
-use common::{CompileFail, ParseFail, Pretty, RunFail, RunPass, RunPassValgrind};
-use common::{Codegen, DebugInfoLldb, DebugInfoGdb, Rustdoc, CodegenUnits};
-use common::{Incremental};
-use errors::{self, ErrorKind};
-use header::TestProps;
-use header;
-use procsrv;
-use test::TestPaths;
-use util::logv;
-
-use std::env;
-use std::collections::HashSet;
-use std::fmt;
-use std::fs::{self, File};
-use std::io::BufReader;
-use std::io::prelude::*;
-use std::net::TcpStream;
-use std::path::{Path, PathBuf, Component};
-use std::process::{Command, Output, ExitStatus};
-
-pub fn run(config: Config, testpaths: &TestPaths) {
- match &*config.target {
-
- "arm-linux-androideabi" | "aarch64-linux-android" => {
- if !config.adb_device_status {
- panic!("android device not available");
- }
- }
-
- _=> { }
- }
-
- if config.verbose {
- // We're going to be dumping a lot of info. Start on a new line.
- print!("\n\n");
- }
- debug!("running {:?}", testpaths.file.display());
- let props = header::load_props(&testpaths.file);
- debug!("loaded props");
- match config.mode {
- CompileFail => run_cfail_test(&config, &props, &testpaths),
- ParseFail => run_cfail_test(&config, &props, &testpaths),
- RunFail => run_rfail_test(&config, &props, &testpaths),
- RunPass => run_rpass_test(&config, &props, &testpaths),
- RunPassValgrind => run_valgrind_test(&config, &props, &testpaths),
- Pretty => run_pretty_test(&config, &props, &testpaths),
- DebugInfoGdb => run_debuginfo_gdb_test(&config, &props, &testpaths),
- DebugInfoLldb => run_debuginfo_lldb_test(&config, &props, &testpaths),
- Codegen => run_codegen_test(&config, &props, &testpaths),
- Rustdoc => run_rustdoc_test(&config, &props, &testpaths),
- CodegenUnits => run_codegen_units_test(&config, &props, &testpaths),
- Incremental => run_incremental_test(&config, &props, &testpaths),
- }
-}
-
-fn get_output(props: &TestProps, proc_res: &ProcRes) -> String {
- if props.check_stdout {
- format!("{}{}", proc_res.stdout, proc_res.stderr)
- } else {
- proc_res.stderr.clone()
- }
-}
-
-
-fn for_each_revision<OP>(config: &Config, props: &TestProps, testpaths: &TestPaths,
- mut op: OP)
- where OP: FnMut(&Config, &TestProps, &TestPaths, Option<&str>)
-{
- if props.revisions.is_empty() {
- op(config, props, testpaths, None)
- } else {
- for revision in &props.revisions {
- let mut revision_props = props.clone();
- header::load_props_into(&mut revision_props,
- &testpaths.file,
- Some(&revision));
- revision_props.compile_flags.extend(vec![
- format!("--cfg"),
- format!("{}", revision),
- ]);
- op(config, &revision_props, testpaths, Some(revision));
- }
- }
-}
-
-fn run_cfail_test(config: &Config, props: &TestProps, testpaths: &TestPaths) {
- for_each_revision(config, props, testpaths, run_cfail_test_revision);
-}
-
-fn run_cfail_test_revision(config: &Config,
- props: &TestProps,
- testpaths: &TestPaths,
- revision: Option<&str>) {
- let proc_res = compile_test(config, props, testpaths);
-
- if proc_res.status.success() {
- fatal_proc_rec(
- revision,
- &format!("{} test compiled successfully!", config.mode)[..],
- &proc_res);
- }
-
- check_correct_failure_status(revision, &proc_res);
-
- if proc_res.status.success() {
- fatal(revision, "process did not return an error status");
- }
-
- let output_to_check = get_output(props, &proc_res);
- let expected_errors = errors::load_errors(&testpaths.file, revision);
- if !expected_errors.is_empty() {
- if !props.error_patterns.is_empty() {
- fatal(revision, "both error pattern and expected errors specified");
- }
- check_expected_errors(revision, expected_errors, testpaths, &proc_res);
- } else {
- check_error_patterns(revision, props, testpaths, &output_to_check, &proc_res);
- }
- check_no_compiler_crash(revision, &proc_res);
- check_forbid_output(revision, props, &output_to_check, &proc_res);
-}
-
-fn run_rfail_test(config: &Config, props: &TestProps, testpaths: &TestPaths) {
- for_each_revision(config, props, testpaths, run_rfail_test_revision);
-}
-
-fn run_rfail_test_revision(config: &Config,
- props: &TestProps,
- testpaths: &TestPaths,
- revision: Option<&str>) {
- let proc_res = compile_test(config, props, testpaths);
-
- if !proc_res.status.success() {
- fatal_proc_rec(revision, "compilation failed!", &proc_res);
- }
-
- let proc_res = exec_compiled_test(config, props, testpaths);
-
- // The value our Makefile configures valgrind to return on failure
- const VALGRIND_ERR: i32 = 100;
- if proc_res.status.code() == Some(VALGRIND_ERR) {
- fatal_proc_rec(revision, "run-fail test isn't valgrind-clean!", &proc_res);
- }
-
- let output_to_check = get_output(props, &proc_res);
- check_correct_failure_status(revision, &proc_res);
- check_error_patterns(revision, props, testpaths, &output_to_check, &proc_res);
-}
-
-fn check_correct_failure_status(revision: Option<&str>, proc_res: &ProcRes) {
- // The value the rust runtime returns on failure
- const RUST_ERR: i32 = 101;
- if proc_res.status.code() != Some(RUST_ERR) {
- fatal_proc_rec(
- revision,
- &format!("failure produced the wrong error: {}",
- proc_res.status),
- proc_res);
- }
-}
-
-fn run_rpass_test(config: &Config, props: &TestProps, testpaths: &TestPaths) {
- for_each_revision(config, props, testpaths, run_rpass_test_revision);
-}
-
-fn run_rpass_test_revision(config: &Config,
- props: &TestProps,
- testpaths: &TestPaths,
- revision: Option<&str>) {
- let proc_res = compile_test(config, props, testpaths);
-
- if !proc_res.status.success() {
- fatal_proc_rec(revision, "compilation failed!", &proc_res);
- }
-
- let proc_res = exec_compiled_test(config, props, testpaths);
-
- if !proc_res.status.success() {
- fatal_proc_rec(revision, "test run failed!", &proc_res);
- }
-}
-
-fn run_valgrind_test(config: &Config, props: &TestProps, testpaths: &TestPaths) {
- assert!(props.revisions.is_empty(), "revisions not relevant here");
-
- if config.valgrind_path.is_none() {
- assert!(!config.force_valgrind);
- return run_rpass_test(config, props, testpaths);
- }
-
- let mut proc_res = compile_test(config, props, testpaths);
-
- if !proc_res.status.success() {
- fatal_proc_rec(None, "compilation failed!", &proc_res);
- }
-
- let mut new_config = config.clone();
- new_config.runtool = new_config.valgrind_path.clone();
- proc_res = exec_compiled_test(&new_config, props, testpaths);
-
- if !proc_res.status.success() {
- fatal_proc_rec(None, "test run failed!", &proc_res);
- }
-}
-
-fn run_pretty_test(config: &Config, props: &TestProps, testpaths: &TestPaths) {
- for_each_revision(config, props, testpaths, run_pretty_test_revision);
-}
-
-fn run_pretty_test_revision(config: &Config,
- props: &TestProps,
- testpaths: &TestPaths,
- revision: Option<&str>) {
- if props.pp_exact.is_some() {
- logv(config, "testing for exact pretty-printing".to_owned());
- } else {
- logv(config, "testing for converging pretty-printing".to_owned());
- }
-
- let rounds =
- match props.pp_exact { Some(_) => 1, None => 2 };
-
- let mut src = String::new();
- File::open(&testpaths.file).unwrap().read_to_string(&mut src).unwrap();
- let mut srcs = vec!(src);
-
- let mut round = 0;
- while round < rounds {
- logv(config, format!("pretty-printing round {} revision {:?}",
- round, revision));
- let proc_res = print_source(config,
- props,
- testpaths,
- srcs[round].to_owned(),
- &props.pretty_mode);
-
- if !proc_res.status.success() {
- fatal_proc_rec(revision,
- &format!("pretty-printing failed in round {} revision {:?}",
- round, revision),
- &proc_res);
- }
-
- let ProcRes{ stdout, .. } = proc_res;
- srcs.push(stdout);
- round += 1;
- }
-
- let mut expected = match props.pp_exact {
- Some(ref file) => {
- let filepath = testpaths.file.parent().unwrap().join(file);
- let mut s = String::new();
- File::open(&filepath).unwrap().read_to_string(&mut s).unwrap();
- s
- }
- None => { srcs[srcs.len() - 2].clone() }
- };
- let mut actual = srcs[srcs.len() - 1].clone();
-
- if props.pp_exact.is_some() {
- // Now we have to care about line endings
- let cr = "\r".to_owned();
- actual = actual.replace(&cr, "").to_owned();
- expected = expected.replace(&cr, "").to_owned();
- }
-
- compare_source(revision, &expected, &actual);
-
- // If we're only making sure that the output matches then just stop here
- if props.pretty_compare_only { return; }
-
- // Finally, let's make sure it actually appears to remain valid code
- let proc_res = typecheck_source(config, props, testpaths, actual);
- if !proc_res.status.success() {
- fatal_proc_rec(revision, "pretty-printed source does not typecheck", &proc_res);
- }
-
- if !props.pretty_expanded { return }
-
- // additionally, run `--pretty expanded` and try to build it.
- let proc_res = print_source(config, props, testpaths, srcs[round].clone(), "expanded");
- if !proc_res.status.success() {
- fatal_proc_rec(revision, "pretty-printing (expanded) failed", &proc_res);
- }
-
- let ProcRes{ stdout: expanded_src, .. } = proc_res;
- let proc_res = typecheck_source(config, props, testpaths, expanded_src);
- if !proc_res.status.success() {
- fatal_proc_rec(
- revision,
- "pretty-printed source (expanded) does not typecheck",
- &proc_res);
- }
-
- return;
-
- fn print_source(config: &Config,
- props: &TestProps,
- testpaths: &TestPaths,
- src: String,
- pretty_type: &str) -> ProcRes {
- let aux_dir = aux_output_dir_name(config, testpaths);
- compose_and_run(config,
- testpaths,
- make_pp_args(config,
- props,
- testpaths,
- pretty_type.to_owned()),
- props.exec_env.clone(),
- config.compile_lib_path.to_str().unwrap(),
- Some(aux_dir.to_str().unwrap()),
- Some(src))
- }
-
- fn make_pp_args(config: &Config,
- props: &TestProps,
- testpaths: &TestPaths,
- pretty_type: String) -> ProcArgs {
- let aux_dir = aux_output_dir_name(config, testpaths);
- // FIXME (#9639): This needs to handle non-utf8 paths
- let mut args = vec!("-".to_owned(),
- "-Zunstable-options".to_owned(),
- "--unpretty".to_owned(),
- pretty_type,
- format!("--target={}", config.target),
- "-L".to_owned(),
- aux_dir.to_str().unwrap().to_owned());
- args.extend(split_maybe_args(&config.target_rustcflags));
- args.extend(props.compile_flags.iter().cloned());
- return ProcArgs {
- prog: config.rustc_path.to_str().unwrap().to_owned(),
- args: args,
- };
- }
-
- fn compare_source(revision: Option<&str>, expected: &str, actual: &str) {
- if expected != actual {
- error(revision, "pretty-printed source does not match expected source");
- println!("\n\
-expected:\n\
-------------------------------------------\n\
-{}\n\
-------------------------------------------\n\
-actual:\n\
-------------------------------------------\n\
-{}\n\
-------------------------------------------\n\
-\n",
- expected, actual);
- panic!();
- }
- }
-
- fn typecheck_source(config: &Config, props: &TestProps,
- testpaths: &TestPaths, src: String) -> ProcRes {
- let args = make_typecheck_args(config, props, testpaths);
- compose_and_run_compiler(config, props, testpaths, args, Some(src))
- }
-
- fn make_typecheck_args(config: &Config, props: &TestProps, testpaths: &TestPaths) -> ProcArgs {
- let aux_dir = aux_output_dir_name(config, testpaths);
- let target = if props.force_host {
- &*config.host
- } else {
- &*config.target
- };
- // FIXME (#9639): This needs to handle non-utf8 paths
- let mut args = vec!("-".to_owned(),
- "-Zno-trans".to_owned(),
- format!("--target={}", target),
- "-L".to_owned(),
- config.build_base.to_str().unwrap().to_owned(),
- "-L".to_owned(),
- aux_dir.to_str().unwrap().to_owned());
- args.extend(split_maybe_args(&config.target_rustcflags));
- args.extend(props.compile_flags.iter().cloned());
- // FIXME (#9639): This needs to handle non-utf8 paths
- return ProcArgs {
- prog: config.rustc_path.to_str().unwrap().to_owned(),
- args: args,
- };
- }
-}
-
-fn run_debuginfo_gdb_test(config: &Config, props: &TestProps, testpaths: &TestPaths) {
- assert!(props.revisions.is_empty(), "revisions not relevant here");
-
- let mut config = Config {
- target_rustcflags: cleanup_debug_info_options(&config.target_rustcflags),
- host_rustcflags: cleanup_debug_info_options(&config.host_rustcflags),
- .. config.clone()
- };
-
- let config = &mut config;
- let DebuggerCommands {
- commands,
- check_lines,
- breakpoint_lines
- } = parse_debugger_commands(testpaths, "gdb");
- let mut cmds = commands.join("\n");
-
- // compile test file (it should have 'compile-flags:-g' in the header)
- let compiler_run_result = compile_test(config, props, testpaths);
- if !compiler_run_result.status.success() {
- fatal_proc_rec(None, "compilation failed!", &compiler_run_result);
- }
-
- let exe_file = make_exe_name(config, testpaths);
-
- let debugger_run_result;
- match &*config.target {
- "arm-linux-androideabi" | "aarch64-linux-android" => {
-
- cmds = cmds.replace("run", "continue");
-
- // write debugger script
- let mut script_str = String::with_capacity(2048);
- script_str.push_str(&format!("set charset {}\n", charset()));
- script_str.push_str(&format!("file {}\n", exe_file.to_str().unwrap()));
- script_str.push_str("target remote :5039\n");
- script_str.push_str(&format!("set solib-search-path \
- ./{}/stage2/lib/rustlib/{}/lib/\n",
- config.host, config.target));
- for line in &breakpoint_lines {
- script_str.push_str(&format!("break {:?}:{}\n",
- testpaths.file
- .file_name()
- .unwrap()
- .to_string_lossy(),
- *line)[..]);
- }
- script_str.push_str(&cmds);
- script_str.push_str("\nquit\n");
-
- debug!("script_str = {}", script_str);
- dump_output_file(config,
- testpaths,
- &script_str,
- "debugger.script");
-
-
- procsrv::run("",
- &config.adb_path,
- None,
- &[
- "push".to_owned(),
- exe_file.to_str().unwrap().to_owned(),
- config.adb_test_dir.clone()
- ],
- vec!(("".to_owned(), "".to_owned())),
- Some("".to_owned()))
- .expect(&format!("failed to exec `{:?}`", config.adb_path));
-
- procsrv::run("",
- &config.adb_path,
- None,
- &[
- "forward".to_owned(),
- "tcp:5039".to_owned(),
- "tcp:5039".to_owned()
- ],
- vec!(("".to_owned(), "".to_owned())),
- Some("".to_owned()))
- .expect(&format!("failed to exec `{:?}`", config.adb_path));
-
- let adb_arg = format!("export LD_LIBRARY_PATH={}; \
- gdbserver{} :5039 {}/{}",
- config.adb_test_dir.clone(),
- if config.target.contains("aarch64")
- {"64"} else {""},
- config.adb_test_dir.clone(),
- exe_file.file_name().unwrap().to_str()
- .unwrap());
-
- let mut process = procsrv::run_background("",
- &config.adb_path
- ,
- None,
- &[
- "shell".to_owned(),
- adb_arg.clone()
- ],
- vec!(("".to_owned(),
- "".to_owned())),
- Some("".to_owned()))
- .expect(&format!("failed to exec `{:?}`", config.adb_path));
- loop {
- //waiting 1 second for gdbserver start
- ::std::thread::sleep(::std::time::Duration::new(1,0));
- if TcpStream::connect("127.0.0.1:5039").is_ok() {
- break
- }
- }
-
- let tool_path = match config.android_cross_path.to_str() {
- Some(x) => x.to_owned(),
- None => fatal(None, "cannot find android cross path")
- };
-
- let debugger_script = make_out_name(config, testpaths, "debugger.script");
- // FIXME (#9639): This needs to handle non-utf8 paths
- let debugger_opts =
- vec!("-quiet".to_owned(),
- "-batch".to_owned(),
- "-nx".to_owned(),
- format!("-command={}", debugger_script.to_str().unwrap()));
-
- let mut gdb_path = tool_path;
- gdb_path.push_str(&format!("/bin/{}-gdb", config.target));
- let procsrv::Result {
- out,
- err,
- status
- } = procsrv::run("",
- &gdb_path,
- None,
- &debugger_opts,
- vec!(("".to_owned(), "".to_owned())),
- None)
- .expect(&format!("failed to exec `{:?}`", gdb_path));
- let cmdline = {
- let cmdline = make_cmdline("",
- &format!("{}-gdb", config.target),
- &debugger_opts);
- logv(config, format!("executing {}", cmdline));
- cmdline
- };
-
- debugger_run_result = ProcRes {
- status: Status::Normal(status),
- stdout: out,
- stderr: err,
- cmdline: cmdline
- };
- if process.kill().is_err() {
- println!("Adb process is already finished.");
- }
- }
-
- _=> {
- let rust_src_root = find_rust_src_root(config)
- .expect("Could not find Rust source root");
- let rust_pp_module_rel_path = Path::new("./src/etc");
- let rust_pp_module_abs_path = rust_src_root.join(rust_pp_module_rel_path)
- .to_str()
- .unwrap()
- .to_owned();
- // write debugger script
- let mut script_str = String::with_capacity(2048);
- script_str.push_str(&format!("set charset {}\n", charset()));
- script_str.push_str("show version\n");
-
- match config.gdb_version {
- Some(ref version) => {
- println!("NOTE: compiletest thinks it is using GDB version {}",
- version);
-
- if header::gdb_version_to_int(version) >
- header::gdb_version_to_int("7.4") {
- // Add the directory containing the pretty printers to
- // GDB's script auto loading safe path
- script_str.push_str(
- &format!("add-auto-load-safe-path {}\n",
- rust_pp_module_abs_path.replace(r"\", r"\\"))
- );
- }
- }
- _ => {
- println!("NOTE: compiletest does not know which version of \
- GDB it is using");
- }
- }
-
- // The following line actually doesn't have to do anything with
- // pretty printing, it just tells GDB to print values on one line:
- script_str.push_str("set print pretty off\n");
-
- // Add the pretty printer directory to GDB's source-file search path
- script_str.push_str(&format!("directory {}\n",
- rust_pp_module_abs_path));
-
- // Load the target executable
- script_str.push_str(&format!("file {}\n",
- exe_file.to_str().unwrap()
- .replace(r"\", r"\\")));
-
- // Add line breakpoints
- for line in &breakpoint_lines {
- script_str.push_str(&format!("break '{}':{}\n",
- testpaths.file.file_name().unwrap()
- .to_string_lossy(),
- *line));
- }
-
- script_str.push_str(&cmds);
- script_str.push_str("\nquit\n");
-
- debug!("script_str = {}", script_str);
- dump_output_file(config,
- testpaths,
- &script_str,
- "debugger.script");
-
- // run debugger script with gdb
- fn debugger() -> &'static str {
- if cfg!(windows) {"gdb.exe"} else {"gdb"}
- }
-
- let debugger_script = make_out_name(config, testpaths, "debugger.script");
-
- // FIXME (#9639): This needs to handle non-utf8 paths
- let debugger_opts =
- vec!("-quiet".to_owned(),
- "-batch".to_owned(),
- "-nx".to_owned(),
- format!("-command={}", debugger_script.to_str().unwrap()));
-
- let proc_args = ProcArgs {
- prog: debugger().to_owned(),
- args: debugger_opts,
- };
-
- let environment = vec![("PYTHONPATH".to_owned(), rust_pp_module_abs_path)];
-
- debugger_run_result = compose_and_run(config,
- testpaths,
- proc_args,
- environment,
- config.run_lib_path.to_str().unwrap(),
- None,
- None);
- }
- }
-
- if !debugger_run_result.status.success() {
- fatal(None, "gdb failed to execute");
- }
-
- check_debugger_output(&debugger_run_result, &check_lines);
-}
-
-fn find_rust_src_root(config: &Config) -> Option<PathBuf> {
- let mut path = config.src_base.clone();
- let path_postfix = Path::new("src/etc/lldb_batchmode.py");
-
- while path.pop() {
- if path.join(&path_postfix).is_file() {
- return Some(path);
- }
- }
-
- return None;
-}
-
-fn run_debuginfo_lldb_test(config: &Config, props: &TestProps, testpaths: &TestPaths) {
- assert!(props.revisions.is_empty(), "revisions not relevant here");
-
- if config.lldb_python_dir.is_none() {
- fatal(None, "Can't run LLDB test because LLDB's python path is not set.");
- }
-
- let mut config = Config {
- target_rustcflags: cleanup_debug_info_options(&config.target_rustcflags),
- host_rustcflags: cleanup_debug_info_options(&config.host_rustcflags),
- .. config.clone()
- };
-
- let config = &mut config;
-
- // compile test file (it should have 'compile-flags:-g' in the header)
- let compile_result = compile_test(config, props, testpaths);
- if !compile_result.status.success() {
- fatal_proc_rec(None, "compilation failed!", &compile_result);
- }
-
- let exe_file = make_exe_name(config, testpaths);
-
- match config.lldb_version {
- Some(ref version) => {
- println!("NOTE: compiletest thinks it is using LLDB version {}",
- version);
- }
- _ => {
- println!("NOTE: compiletest does not know which version of \
- LLDB it is using");
- }
- }
-
- // Parse debugger commands etc from test files
- let DebuggerCommands {
- commands,
- check_lines,
- breakpoint_lines,
- ..
- } = parse_debugger_commands(testpaths, "lldb");
-
- // Write debugger script:
- // We don't want to hang when calling `quit` while the process is still running
- let mut script_str = String::from("settings set auto-confirm true\n");
-
- // Make LLDB emit its version, so we have it documented in the test output
- script_str.push_str("version\n");
-
- // Switch LLDB into "Rust mode"
- let rust_src_root = find_rust_src_root(config)
- .expect("Could not find Rust source root");
- let rust_pp_module_rel_path = Path::new("./src/etc/lldb_rust_formatters.py");
- let rust_pp_module_abs_path = rust_src_root.join(rust_pp_module_rel_path)
- .to_str()
- .unwrap()
- .to_owned();
-
- script_str.push_str(&format!("command script import {}\n",
- &rust_pp_module_abs_path[..])[..]);
- script_str.push_str("type summary add --no-value ");
- script_str.push_str("--python-function lldb_rust_formatters.print_val ");
- script_str.push_str("-x \".*\" --category Rust\n");
- script_str.push_str("type category enable Rust\n");
-
- // Set breakpoints on every line that contains the string "#break"
- let source_file_name = testpaths.file.file_name().unwrap().to_string_lossy();
- for line in &breakpoint_lines {
- script_str.push_str(&format!("breakpoint set --file '{}' --line {}\n",
- source_file_name,
- line));
- }
-
- // Append the other commands
- for line in &commands {
- script_str.push_str(line);
- script_str.push_str("\n");
- }
-
- // Finally, quit the debugger
- script_str.push_str("\nquit\n");
-
- // Write the script into a file
- debug!("script_str = {}", script_str);
- dump_output_file(config,
- testpaths,
- &script_str,
- "debugger.script");
- let debugger_script = make_out_name(config, testpaths, "debugger.script");
-
- // Let LLDB execute the script via lldb_batchmode.py
- let debugger_run_result = run_lldb(config,
- testpaths,
- &exe_file,
- &debugger_script,
- &rust_src_root);
-
- if !debugger_run_result.status.success() {
- fatal_proc_rec(None, "Error while running LLDB", &debugger_run_result);
- }
-
- check_debugger_output(&debugger_run_result, &check_lines);
-
- fn run_lldb(config: &Config,
- testpaths: &TestPaths,
- test_executable: &Path,
- debugger_script: &Path,
- rust_src_root: &Path)
- -> ProcRes {
- // Prepare the lldb_batchmode which executes the debugger script
- let lldb_script_path = rust_src_root.join("src/etc/lldb_batchmode.py");
- cmd2procres(config,
- testpaths,
- Command::new(&config.python)
- .arg(&lldb_script_path)
- .arg(test_executable)
- .arg(debugger_script)
- .env("PYTHONPATH",
- config.lldb_python_dir.as_ref().unwrap()))
- }
-}
-
-fn cmd2procres(config: &Config, testpaths: &TestPaths, cmd: &mut Command)
- -> ProcRes {
- let (status, out, err) = match cmd.output() {
- Ok(Output { status, stdout, stderr }) => {
- (status,
- String::from_utf8(stdout).unwrap(),
- String::from_utf8(stderr).unwrap())
- },
- Err(e) => {
- fatal(None, &format!("Failed to setup Python process for \
- LLDB script: {}", e))
- }
- };
-
- dump_output(config, testpaths, &out, &err);
- ProcRes {
- status: Status::Normal(status),
- stdout: out,
- stderr: err,
- cmdline: format!("{:?}", cmd)
- }
-}
-
-struct DebuggerCommands {
- commands: Vec<String>,
- check_lines: Vec<String>,
- breakpoint_lines: Vec<usize>,
-}
-
-fn parse_debugger_commands(testpaths: &TestPaths, debugger_prefix: &str)
- -> DebuggerCommands {
- let command_directive = format!("{}-command", debugger_prefix);
- let check_directive = format!("{}-check", debugger_prefix);
-
- let mut breakpoint_lines = vec!();
- let mut commands = vec!();
- let mut check_lines = vec!();
- let mut counter = 1;
- let reader = BufReader::new(File::open(&testpaths.file).unwrap());
- for line in reader.lines() {
- match line {
- Ok(line) => {
- if line.contains("#break") {
- breakpoint_lines.push(counter);
- }
-
- header::parse_name_value_directive(
- &line,
- &command_directive).map(|cmd| {
- commands.push(cmd)
- });
-
- header::parse_name_value_directive(
- &line,
- &check_directive).map(|cmd| {
- check_lines.push(cmd)
- });
- }
- Err(e) => {
- fatal(None, &format!("Error while parsing debugger commands: {}", e))
- }
- }
- counter += 1;
- }
-
- DebuggerCommands {
- commands: commands,
- check_lines: check_lines,
- breakpoint_lines: breakpoint_lines,
- }
-}
-
-fn cleanup_debug_info_options(options: &Option<String>) -> Option<String> {
- if options.is_none() {
- return None;
- }
-
- // Remove options that are either unwanted (-O) or may lead to duplicates due to RUSTFLAGS.
- let options_to_remove = [
- "-O".to_owned(),
- "-g".to_owned(),
- "--debuginfo".to_owned()
- ];
- let new_options =
- split_maybe_args(options).into_iter()
- .filter(|x| !options_to_remove.contains(x))
- .collect::<Vec<String>>();
-
- Some(new_options.join(" "))
-}
-
-fn check_debugger_output(debugger_run_result: &ProcRes, check_lines: &[String]) {
- let num_check_lines = check_lines.len();
- if num_check_lines > 0 {
- // Allow check lines to leave parts unspecified (e.g., uninitialized
- // bits in the wrong case of an enum) with the notation "[...]".
- let check_fragments: Vec<Vec<String>> =
- check_lines.iter().map(|s| {
- s
- .trim()
- .split("[...]")
- .map(str::to_owned)
- .collect()
- }).collect();
- // check if each line in props.check_lines appears in the
- // output (in order)
- let mut i = 0;
- for line in debugger_run_result.stdout.lines() {
- let mut rest = line.trim();
- let mut first = true;
- let mut failed = false;
- for frag in &check_fragments[i] {
- let found = if first {
- if rest.starts_with(frag) {
- Some(0)
- } else {
- None
- }
- } else {
- rest.find(frag)
- };
- match found {
- None => {
- failed = true;
- break;
- }
- Some(i) => {
- rest = &rest[(i + frag.len())..];
- }
- }
- first = false;
- }
- if !failed && rest.is_empty() {
- i += 1;
- }
- if i == num_check_lines {
- // all lines checked
- break;
- }
- }
- if i != num_check_lines {
- fatal_proc_rec(None, &format!("line not found in debugger output: {}",
- check_lines.get(i).unwrap()),
- debugger_run_result);
- }
- }
-}
-
-fn check_error_patterns(revision: Option<&str>,
- props: &TestProps,
- testpaths: &TestPaths,
- output_to_check: &str,
- proc_res: &ProcRes) {
- if props.error_patterns.is_empty() {
- fatal(revision,
- &format!("no error pattern specified in {:?}",
- testpaths.file.display()));
- }
- let mut next_err_idx = 0;
- let mut next_err_pat = &props.error_patterns[next_err_idx];
- let mut done = false;
- for line in output_to_check.lines() {
- if line.contains(next_err_pat) {
- debug!("found error pattern {}", next_err_pat);
- next_err_idx += 1;
- if next_err_idx == props.error_patterns.len() {
- debug!("found all error patterns");
- done = true;
- break;
- }
- next_err_pat = &props.error_patterns[next_err_idx];
- }
- }
- if done { return; }
-
- let missing_patterns = &props.error_patterns[next_err_idx..];
- if missing_patterns.len() == 1 {
- fatal_proc_rec(
- revision,
- &format!("error pattern '{}' not found!", missing_patterns[0]),
- proc_res);
- } else {
- for pattern in missing_patterns {
- error(revision, &format!("error pattern '{}' not found!", *pattern));
- }
- fatal_proc_rec(revision, "multiple error patterns not found", proc_res);
- }
-}
-
-fn check_no_compiler_crash(revision: Option<&str>, proc_res: &ProcRes) {
- for line in proc_res.stderr.lines() {
- if line.starts_with("error: internal compiler error:") {
- fatal_proc_rec(revision,
- "compiler encountered internal error",
- proc_res);
- }
- }
-}
-
-fn check_forbid_output(revision: Option<&str>,
- props: &TestProps,
- output_to_check: &str,
- proc_res: &ProcRes) {
- for pat in &props.forbid_output {
- if output_to_check.contains(pat) {
- fatal_proc_rec(revision,
- "forbidden pattern found in compiler output",
- proc_res);
- }
- }
-}
-
-fn check_expected_errors(revision: Option<&str>,
- expected_errors: Vec<errors::ExpectedError>,
- testpaths: &TestPaths,
- proc_res: &ProcRes) {
- // true if we found the error in question
- let mut found_flags = vec![false; expected_errors.len()];
-
- if proc_res.status.success() {
- fatal_proc_rec(revision, "process did not return an error status", proc_res);
- }
-
- let prefixes = expected_errors.iter().map(|ee| {
- let expected = format!("{}:{}:", testpaths.file.display(), ee.line_num);
- // On windows just translate all '\' path separators to '/'
- expected.replace(r"\", "/")
- }).collect::<Vec<String>>();
-
- // If the testcase being checked contains at least one expected "help"
- // message, then we'll ensure that all "help" messages are expected.
- // Otherwise, all "help" messages reported by the compiler will be ignored.
- // This logic also applies to "note" messages.
- let (expect_help, expect_note) =
- expected_errors.iter()
- .fold((false, false),
- |(acc_help, acc_note), ee|
- (acc_help || ee.kind == Some(ErrorKind::Help),
- acc_note || ee.kind == Some(ErrorKind::Note)));
-
- // Scan and extract our error/warning messages,
- // which look like:
- // filename:line1:col1: line2:col2: *error:* msg
- // filename:line1:col1: line2:col2: *warning:* msg
- // where line1:col1: is the starting point, line2:col2:
- // is the ending point, and * represents ANSI color codes.
- //
- // This pattern is ambiguous on windows, because filename may contain
- // a colon, so any path prefix must be detected and removed first.
- let mut unexpected = 0;
- let mut not_found = 0;
- for line in proc_res.stderr.lines() {
- let mut was_expected = false;
- let mut prev = 0;
- for (i, ee) in expected_errors.iter().enumerate() {
- if !found_flags[i] {
- debug!("prefix={} ee.kind={:?} ee.msg={} line={}",
- prefixes[i],
- ee.kind,
- ee.msg,
- line);
- // Suggestions have no line number in their output, so take on the line number of
- // the previous expected error
- if ee.kind == Some(ErrorKind::Suggestion) {
- assert!(expected_errors[prev].kind == Some(ErrorKind::Help),
- "SUGGESTIONs must be preceded by a HELP");
- if line.contains(&ee.msg) {
- found_flags[i] = true;
- was_expected = true;
- break;
- }
- }
- if
- (prefix_matches(line, &prefixes[i]) || continuation(line)) &&
- (ee.kind.is_none() || line.contains(&ee.kind.as_ref().unwrap().to_string())) &&
- line.contains(&ee.msg)
- {
- found_flags[i] = true;
- was_expected = true;
- break;
- }
- }
- prev = i;
- }
-
- // ignore this msg which gets printed at the end
- if line.contains("aborting due to") {
- was_expected = true;
- }
-
- if !was_expected && is_unexpected_compiler_message(line, expect_help, expect_note) {
- error(revision, &format!("unexpected compiler message: '{}'", line));
- unexpected += 1;
- }
- }
-
- for (i, &flag) in found_flags.iter().enumerate() {
- if !flag {
- let ee = &expected_errors[i];
- error(revision, &format!("expected {} on line {} not found: {}",
- ee.kind.as_ref()
- .map_or("message".into(),
- |k| k.to_string()),
- ee.line_num, ee.msg));
- not_found += 1;
- }
- }
-
- if unexpected > 0 || not_found > 0 {
- fatal_proc_rec(
- revision,
- &format!("{} unexpected errors found, {} expected errors not found",
- unexpected, not_found),
- proc_res);
- }
-
- fn prefix_matches(line: &str, prefix: &str) -> bool {
- use std::ascii::AsciiExt;
- // On windows just translate all '\' path separators to '/'
- let line = line.replace(r"\", "/");
- if cfg!(windows) {
- line.to_ascii_lowercase().starts_with(&prefix.to_ascii_lowercase())
- } else {
- line.starts_with(prefix)
- }
- }
-
- // A multi-line error will have followup lines which start with a space
- // or open paren.
- fn continuation( line: &str) -> bool {
- line.starts_with(" ") || line.starts_with("(")
- }
-}
-
-fn is_unexpected_compiler_message(line: &str, expect_help: bool, expect_note: bool) -> bool {
- let mut c = Path::new(line).components();
- let line = match c.next() {
- Some(Component::Prefix(_)) => c.as_path().to_str().unwrap(),
- _ => line,
- };
-
- let mut i = 0;
- return scan_until_char(line, ':', &mut i) &&
- scan_char(line, ':', &mut i) &&
- scan_integer(line, &mut i) &&
- scan_char(line, ':', &mut i) &&
- scan_integer(line, &mut i) &&
- scan_char(line, ':', &mut i) &&
- scan_char(line, ' ', &mut i) &&
- scan_integer(line, &mut i) &&
- scan_char(line, ':', &mut i) &&
- scan_integer(line, &mut i) &&
- scan_char(line, ' ', &mut i) &&
- (scan_string(line, "error", &mut i) ||
- scan_string(line, "warning", &mut i) ||
- (expect_help && scan_string(line, "help", &mut i)) ||
- (expect_note && scan_string(line, "note", &mut i))
- );
-}
-
-fn scan_until_char(haystack: &str, needle: char, idx: &mut usize) -> bool {
- if *idx >= haystack.len() {
- return false;
- }
- let opt = haystack[(*idx)..].find(needle);
- if opt.is_none() {
- return false;
- }
- *idx = opt.unwrap();
- return true;
-}
-
-fn scan_char(haystack: &str, needle: char, idx: &mut usize) -> bool {
- if *idx >= haystack.len() {
- return false;
- }
- let ch = haystack[*idx..].chars().next().unwrap();
- if ch != needle {
- return false;
- }
- *idx += ch.len_utf8();
- return true;
-}
-
-fn scan_integer(haystack: &str, idx: &mut usize) -> bool {
- let mut i = *idx;
- while i < haystack.len() {
- let ch = haystack[i..].chars().next().unwrap();
- if ch < '0' || '9' < ch {
- break;
- }
- i += ch.len_utf8();
- }
- if i == *idx {
- return false;
- }
- *idx = i;
- return true;
-}
-
-fn scan_string(haystack: &str, needle: &str, idx: &mut usize) -> bool {
- let mut haystack_i = *idx;
- let mut needle_i = 0;
- while needle_i < needle.len() {
- if haystack_i >= haystack.len() {
- return false;
- }
- let ch = haystack[haystack_i..].chars().next().unwrap();
- haystack_i += ch.len_utf8();
- if !scan_char(needle, ch, &mut needle_i) {
- return false;
- }
- }
- *idx = haystack_i;
- return true;
-}
-
-struct ProcArgs {
- prog: String,
- args: Vec<String>,
-}
-
-struct ProcRes {
- status: Status,
- stdout: String,
- stderr: String,
- cmdline: String,
-}
-
-enum Status {
- Parsed(i32),
- Normal(ExitStatus),
-}
-
-impl Status {
- fn code(&self) -> Option<i32> {
- match *self {
- Status::Parsed(i) => Some(i),
- Status::Normal(ref e) => e.code(),
- }
- }
-
- fn success(&self) -> bool {
- match *self {
- Status::Parsed(i) => i == 0,
- Status::Normal(ref e) => e.success(),
- }
- }
-}
-
-impl fmt::Display for Status {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- match *self {
- Status::Parsed(i) => write!(f, "exit code: {}", i),
- Status::Normal(ref e) => e.fmt(f),
- }
- }
-}
-
-fn compile_test(config: &Config, props: &TestProps,
- testpaths: &TestPaths) -> ProcRes {
- let aux_dir = aux_output_dir_name(config, testpaths);
- // FIXME (#9639): This needs to handle non-utf8 paths
- let link_args = vec!("-L".to_owned(),
- aux_dir.to_str().unwrap().to_owned());
- let args = make_compile_args(config,
- props,
- link_args,
- |a, b| TargetLocation::ThisFile(make_exe_name(a, b)), testpaths);
- compose_and_run_compiler(config, props, testpaths, args, None)
-}
-
-fn document(config: &Config,
- props: &TestProps,
- testpaths: &TestPaths,
- out_dir: &Path)
- -> ProcRes {
- if props.build_aux_docs {
- for rel_ab in &props.aux_builds {
- let aux_testpaths = compute_aux_test_paths(config, testpaths, rel_ab);
- let aux_props = header::load_props(&aux_testpaths.file);
- let auxres = document(config, &aux_props, &aux_testpaths, out_dir);
- if !auxres.status.success() {
- return auxres;
- }
- }
- }
-
- let aux_dir = aux_output_dir_name(config, testpaths);
- let mut args = vec!["-L".to_owned(),
- aux_dir.to_str().unwrap().to_owned(),
- "-o".to_owned(),
- out_dir.to_str().unwrap().to_owned(),
- testpaths.file.to_str().unwrap().to_owned()];
- args.extend(props.compile_flags.iter().cloned());
- let args = ProcArgs {
- prog: config.rustdoc_path.to_str().unwrap().to_owned(),
- args: args,
- };
- compose_and_run_compiler(config, props, testpaths, args, None)
-}
-
-fn exec_compiled_test(config: &Config, props: &TestProps,
- testpaths: &TestPaths) -> ProcRes {
-
- let env = props.exec_env.clone();
-
- match &*config.target {
-
- "arm-linux-androideabi" | "aarch64-linux-android" => {
- _arm_exec_compiled_test(config, props, testpaths, env)
- }
-
- _=> {
- let aux_dir = aux_output_dir_name(config, testpaths);
- compose_and_run(config,
- testpaths,
- make_run_args(config, props, testpaths),
- env,
- config.run_lib_path.to_str().unwrap(),
- Some(aux_dir.to_str().unwrap()),
- None)
- }
- }
-}
-
-fn compute_aux_test_paths(config: &Config,
- testpaths: &TestPaths,
- rel_ab: &str)
- -> TestPaths
-{
- let abs_ab = config.aux_base.join(rel_ab);
- TestPaths {
- file: abs_ab,
- base: testpaths.base.clone(),
- relative_dir: Path::new(rel_ab).parent()
- .map(|p| p.to_path_buf())
- .unwrap_or_else(|| PathBuf::new())
- }
-}
-
-fn compose_and_run_compiler(config: &Config, props: &TestProps,
- testpaths: &TestPaths, args: ProcArgs,
- input: Option<String>) -> ProcRes {
- if !props.aux_builds.is_empty() {
- ensure_dir(&aux_output_dir_name(config, testpaths));
- }
-
- let aux_dir = aux_output_dir_name(config, testpaths);
- // FIXME (#9639): This needs to handle non-utf8 paths
- let extra_link_args = vec!["-L".to_owned(),
- aux_dir.to_str().unwrap().to_owned()];
-
- for rel_ab in &props.aux_builds {
- let aux_testpaths = compute_aux_test_paths(config, testpaths, rel_ab);
- let aux_props = header::load_props(&aux_testpaths.file);
- let mut crate_type = if aux_props.no_prefer_dynamic {
- Vec::new()
- } else {
- // We primarily compile all auxiliary libraries as dynamic libraries
- // to avoid code size bloat and large binaries as much as possible
- // for the test suite (otherwise including libstd statically in all
- // executables takes up quite a bit of space).
- //
- // For targets like MUSL or Emscripten, however, there is no support for
- // dynamic libraries so we just go back to building a normal library. Note,
- // however, that for MUSL if the library is built with `force_host` then
- // it's ok to be a dylib as the host should always support dylibs.
- if (config.target.contains("musl") && !aux_props.force_host) ||
- config.target.contains("emscripten")
- {
- vec!("--crate-type=lib".to_owned())
- } else {
- vec!("--crate-type=dylib".to_owned())
- }
- };
- crate_type.extend(extra_link_args.clone());
- let aux_args =
- make_compile_args(config,
- &aux_props,
- crate_type,
- |a,b| {
- let f = make_lib_name(a, &b.file, testpaths);
- let parent = f.parent().unwrap();
- TargetLocation::ThisDirectory(parent.to_path_buf())
- },
- &aux_testpaths);
- let auxres = compose_and_run(config,
- &aux_testpaths,
- aux_args,
- Vec::new(),
- config.compile_lib_path.to_str().unwrap(),
- Some(aux_dir.to_str().unwrap()),
- None);
- if !auxres.status.success() {
- fatal_proc_rec(
- None,
- &format!("auxiliary build of {:?} failed to compile: ",
- aux_testpaths.file.display()),
- &auxres);
- }
-
- match &*config.target {
- "arm-linux-androideabi" | "aarch64-linux-android" => {
- _arm_push_aux_shared_library(config, testpaths);
- }
- _ => {}
- }
- }
-
- compose_and_run(config,
- testpaths,
- args,
- props.rustc_env.clone(),
- config.compile_lib_path.to_str().unwrap(),
- Some(aux_dir.to_str().unwrap()),
- input)
-}
-
-fn ensure_dir(path: &Path) {
- if path.is_dir() { return; }
- fs::create_dir_all(path).unwrap();
-}
-
-fn compose_and_run(config: &Config,
- testpaths: &TestPaths,
- ProcArgs{ args, prog }: ProcArgs,
- procenv: Vec<(String, String)> ,
- lib_path: &str,
- aux_path: Option<&str>,
- input: Option<String>) -> ProcRes {
- return program_output(config, testpaths, lib_path,
- prog, aux_path, args, procenv, input);
-}
-
-enum TargetLocation {
- ThisFile(PathBuf),
- ThisDirectory(PathBuf),
-}
-
-fn make_compile_args<F>(config: &Config,
- props: &TestProps,
- extras: Vec<String> ,
- xform: F,
- testpaths: &TestPaths)
- -> ProcArgs where
- F: FnOnce(&Config, &TestPaths) -> TargetLocation,
-{
- let xform_file = xform(config, testpaths);
- let target = if props.force_host {
- &*config.host
- } else {
- &*config.target
- };
- // FIXME (#9639): This needs to handle non-utf8 paths
- let mut args = vec!(testpaths.file.to_str().unwrap().to_owned(),
- "-L".to_owned(),
- config.build_base.to_str().unwrap().to_owned(),
- format!("--target={}", target));
- args.extend_from_slice(&extras);
- if !props.no_prefer_dynamic {
- args.push("-C".to_owned());
- args.push("prefer-dynamic".to_owned());
- }
- let path = match xform_file {
- TargetLocation::ThisFile(path) => {
- args.push("-o".to_owned());
- path
- }
- TargetLocation::ThisDirectory(path) => {
- args.push("--out-dir".to_owned());
- path
- }
- };
- args.push(path.to_str().unwrap().to_owned());
- if props.force_host {
- args.extend(split_maybe_args(&config.host_rustcflags));
- } else {
- args.extend(split_maybe_args(&config.target_rustcflags));
- }
- args.extend(props.compile_flags.iter().cloned());
- return ProcArgs {
- prog: config.rustc_path.to_str().unwrap().to_owned(),
- args: args,
- };
-}
-
-fn make_lib_name(config: &Config, auxfile: &Path, testpaths: &TestPaths) -> PathBuf {
- // what we return here is not particularly important, as it
- // happens; rustc ignores everything except for the directory.
- let auxname = output_testname(auxfile);
- aux_output_dir_name(config, testpaths).join(&auxname)
-}
-
-fn make_exe_name(config: &Config, testpaths: &TestPaths) -> PathBuf {
- let mut f = output_base_name(config, testpaths);
- // FIXME: This is using the host architecture exe suffix, not target!
- if config.target == "asmjs-unknown-emscripten" {
- let mut fname = f.file_name().unwrap().to_os_string();
- fname.push(".js");
- f.set_file_name(&fname);
- } else if !env::consts::EXE_SUFFIX.is_empty() {
- let mut fname = f.file_name().unwrap().to_os_string();
- fname.push(env::consts::EXE_SUFFIX);
- f.set_file_name(&fname);
- }
- f
-}
-
-fn make_run_args(config: &Config, props: &TestProps, testpaths: &TestPaths)
- -> ProcArgs {
- // If we've got another tool to run under (valgrind),
- // then split apart its command
- let mut args = split_maybe_args(&config.runtool);
-
- // If this is emscripten, then run tests under nodejs
- if config.target == "asmjs-unknown-emscripten" {
- args.push("nodejs".to_owned());
- }
-
- let exe_file = make_exe_name(config, testpaths);
-
- // FIXME (#9639): This needs to handle non-utf8 paths
- args.push(exe_file.to_str().unwrap().to_owned());
-
- // Add the arguments in the run_flags directive
- args.extend(split_maybe_args(&props.run_flags));
-
- let prog = args.remove(0);
- return ProcArgs {
- prog: prog,
- args: args,
- };
-}
-
-fn split_maybe_args(argstr: &Option<String>) -> Vec<String> {
- match *argstr {
- Some(ref s) => {
- s
- .split(' ')
- .filter_map(|s| {
- if s.chars().all(|c| c.is_whitespace()) {
- None
- } else {
- Some(s.to_owned())
- }
- }).collect()
- }
- None => Vec::new()
- }
-}
-
-fn program_output(config: &Config, testpaths: &TestPaths, lib_path: &str, prog: String,
- aux_path: Option<&str>, args: Vec<String>,
- env: Vec<(String, String)>,
- input: Option<String>) -> ProcRes {
- let cmdline =
- {
- let cmdline = make_cmdline(lib_path,
- &prog,
- &args);
- logv(config, format!("executing {}", cmdline));
- cmdline
- };
- let procsrv::Result {
- out,
- err,
- status
- } = procsrv::run(lib_path,
- &prog,
- aux_path,
- &args,
- env,
- input).expect(&format!("failed to exec `{}`", prog));
- dump_output(config, testpaths, &out, &err);
- return ProcRes {
- status: Status::Normal(status),
- stdout: out,
- stderr: err,
- cmdline: cmdline,
- };
-}
-
-fn make_cmdline(libpath: &str, prog: &str, args: &[String]) -> String {
- use util;
-
- // Linux and mac don't require adjusting the library search path
- if cfg!(unix) {
- format!("{} {}", prog, args.join(" "))
- } else {
- // Build the LD_LIBRARY_PATH variable as it would be seen on the command line
- // for diagnostic purposes
- fn lib_path_cmd_prefix(path: &str) -> String {
- format!("{}=\"{}\"", util::lib_path_env_var(), util::make_new_path(path))
- }
-
- format!("{} {} {}", lib_path_cmd_prefix(libpath), prog, args.join(" "))
- }
-}
-
-fn dump_output(config: &Config, testpaths: &TestPaths, out: &str, err: &str) {
- dump_output_file(config, testpaths, out, "out");
- dump_output_file(config, testpaths, err, "err");
- maybe_dump_to_stdout(config, out, err);
-}
-
-fn dump_output_file(config: &Config,
- testpaths: &TestPaths,
- out: &str,
- extension: &str) {
- let outfile = make_out_name(config, testpaths, extension);
- File::create(&outfile).unwrap().write_all(out.as_bytes()).unwrap();
-}
-
-fn make_out_name(config: &Config, testpaths: &TestPaths, extension: &str) -> PathBuf {
- output_base_name(config, testpaths).with_extension(extension)
-}
-
-fn aux_output_dir_name(config: &Config, testpaths: &TestPaths) -> PathBuf {
- let f = output_base_name(config, testpaths);
- let mut fname = f.file_name().unwrap().to_os_string();
- fname.push(&format!(".{}.libaux", config.mode));
- f.with_file_name(&fname)
-}
-
-fn output_testname(filepath: &Path) -> PathBuf {
- PathBuf::from(filepath.file_stem().unwrap())
-}
-
-fn output_base_name(config: &Config, testpaths: &TestPaths) -> PathBuf {
- let dir = config.build_base.join(&testpaths.relative_dir);
-
- // Note: The directory `dir` is created during `collect_tests_from_dir`
- dir
- .join(&output_testname(&testpaths.file))
- .with_extension(&config.stage_id)
-}
-
-fn maybe_dump_to_stdout(config: &Config, out: &str, err: &str) {
- if config.verbose {
- println!("------{}------------------------------", "stdout");
- println!("{}", out);
- println!("------{}------------------------------", "stderr");
- println!("{}", err);
- println!("------------------------------------------");
- }
-}
-
-fn error(revision: Option<&str>, err: &str) {
- match revision {
- Some(rev) => println!("\nerror in revision `{}`: {}", rev, err),
- None => println!("\nerror: {}", err)
- }
-}
-
-fn fatal(revision: Option<&str>, err: &str) -> ! {
- error(revision, err); panic!();
-}
-
-fn fatal_proc_rec(revision: Option<&str>, err: &str, proc_res: &ProcRes) -> ! {
- error(revision, err);
- print!("\
-status: {}\n\
-command: {}\n\
-stdout:\n\
-------------------------------------------\n\
-{}\n\
-------------------------------------------\n\
-stderr:\n\
-------------------------------------------\n\
-{}\n\
-------------------------------------------\n\
-\n",
- proc_res.status, proc_res.cmdline, proc_res.stdout,
- proc_res.stderr);
- panic!();
-}
-
-fn _arm_exec_compiled_test(config: &Config,
- props: &TestProps,
- testpaths: &TestPaths,
- env: Vec<(String, String)>)
- -> ProcRes {
- let args = make_run_args(config, props, testpaths);
- let cmdline = make_cmdline("",
- &args.prog,
- &args.args);
-
- // get bare program string
- let mut tvec: Vec<String> = args.prog
- .split('/')
- .map(str::to_owned)
- .collect();
- let prog_short = tvec.pop().unwrap();
-
- // copy to target
- let copy_result = procsrv::run("",
- &config.adb_path,
- None,
- &[
- "push".to_owned(),
- args.prog.clone(),
- config.adb_test_dir.clone()
- ],
- vec!(("".to_owned(), "".to_owned())),
- Some("".to_owned()))
- .expect(&format!("failed to exec `{}`", config.adb_path));
-
- if config.verbose {
- println!("push ({}) {} {} {}",
- config.target,
- args.prog,
- copy_result.out,
- copy_result.err);
- }
-
- logv(config, format!("executing ({}) {}", config.target, cmdline));
-
- let mut runargs = Vec::new();
-
- // run test via adb_run_wrapper
- runargs.push("shell".to_owned());
- for (key, val) in env {
- runargs.push(format!("{}={}", key, val));
- }
- runargs.push(format!("{}/../adb_run_wrapper.sh", config.adb_test_dir));
- runargs.push(format!("{}", config.adb_test_dir));
- runargs.push(format!("{}", prog_short));
-
- for tv in &args.args {
- runargs.push(tv.to_owned());
- }
- procsrv::run("",
- &config.adb_path,
- None,
- &runargs,
- vec!(("".to_owned(), "".to_owned())), Some("".to_owned()))
- .expect(&format!("failed to exec `{}`", config.adb_path));
-
- // get exitcode of result
- runargs = Vec::new();
- runargs.push("shell".to_owned());
- runargs.push("cat".to_owned());
- runargs.push(format!("{}/{}.exitcode", config.adb_test_dir, prog_short));
-
- let procsrv::Result{ out: exitcode_out, err: _, status: _ } =
- procsrv::run("",
- &config.adb_path,
- None,
- &runargs,
- vec!(("".to_owned(), "".to_owned())),
- Some("".to_owned()))
- .expect(&format!("failed to exec `{}`", config.adb_path));
-
- let mut exitcode: i32 = 0;
- for c in exitcode_out.chars() {
- if !c.is_numeric() { break; }
- exitcode = exitcode * 10 + match c {
- '0' ... '9' => c as i32 - ('0' as i32),
- _ => 101,
- }
- }
-
- // get stdout of result
- runargs = Vec::new();
- runargs.push("shell".to_owned());
- runargs.push("cat".to_owned());
- runargs.push(format!("{}/{}.stdout", config.adb_test_dir, prog_short));
-
- let procsrv::Result{ out: stdout_out, err: _, status: _ } =
- procsrv::run("",
- &config.adb_path,
- None,
- &runargs,
- vec!(("".to_owned(), "".to_owned())),
- Some("".to_owned()))
- .expect(&format!("failed to exec `{}`", config.adb_path));
-
- // get stderr of result
- runargs = Vec::new();
- runargs.push("shell".to_owned());
- runargs.push("cat".to_owned());
- runargs.push(format!("{}/{}.stderr", config.adb_test_dir, prog_short));
-
- let procsrv::Result{ out: stderr_out, err: _, status: _ } =
- procsrv::run("",
- &config.adb_path,
- None,
- &runargs,
- vec!(("".to_owned(), "".to_owned())),
- Some("".to_owned()))
- .expect(&format!("failed to exec `{}`", config.adb_path));
-
- dump_output(config,
- testpaths,
- &stdout_out,
- &stderr_out);
-
- ProcRes {
- status: Status::Parsed(exitcode),
- stdout: stdout_out,
- stderr: stderr_out,
- cmdline: cmdline
- }
-}
-
-fn _arm_push_aux_shared_library(config: &Config, testpaths: &TestPaths) {
- let tdir = aux_output_dir_name(config, testpaths);
-
- let dirs = fs::read_dir(&tdir).unwrap();
- for file in dirs {
- let file = file.unwrap().path();
- if file.extension().and_then(|s| s.to_str()) == Some("so") {
- // FIXME (#9639): This needs to handle non-utf8 paths
- let copy_result = procsrv::run("",
- &config.adb_path,
- None,
- &[
- "push".to_owned(),
- file.to_str()
- .unwrap()
- .to_owned(),
- config.adb_test_dir.to_owned(),
- ],
- vec!(("".to_owned(),
- "".to_owned())),
- Some("".to_owned()))
- .expect(&format!("failed to exec `{}`", config.adb_path));
-
- if config.verbose {
- println!("push ({}) {:?} {} {}",
- config.target, file.display(),
- copy_result.out, copy_result.err);
- }
- }
- }
-}
-
-// codegen tests (using FileCheck)
-
-fn compile_test_and_save_ir(config: &Config, props: &TestProps,
- testpaths: &TestPaths) -> ProcRes {
- let aux_dir = aux_output_dir_name(config, testpaths);
- // FIXME (#9639): This needs to handle non-utf8 paths
- let mut link_args = vec!("-L".to_owned(),
- aux_dir.to_str().unwrap().to_owned());
- let llvm_args = vec!("--emit=llvm-ir".to_owned(),);
- link_args.extend(llvm_args);
- let args = make_compile_args(config,
- props,
- link_args,
- |a, b| TargetLocation::ThisDirectory(
- output_base_name(a, b).parent()
- .unwrap().to_path_buf()),
- testpaths);
- compose_and_run_compiler(config, props, testpaths, args, None)
-}
-
-fn check_ir_with_filecheck(config: &Config, testpaths: &TestPaths) -> ProcRes {
- let irfile = output_base_name(config, testpaths).with_extension("ll");
- let prog = config.llvm_bin_path.as_ref().unwrap().join("FileCheck");
- let proc_args = ProcArgs {
- // FIXME (#9639): This needs to handle non-utf8 paths
- prog: prog.to_str().unwrap().to_owned(),
- args: vec!(format!("-input-file={}", irfile.to_str().unwrap()),
- testpaths.file.to_str().unwrap().to_owned())
- };
- compose_and_run(config, testpaths, proc_args, Vec::new(), "", None, None)
-}
-
-fn run_codegen_test(config: &Config, props: &TestProps, testpaths: &TestPaths) {
- assert!(props.revisions.is_empty(), "revisions not relevant here");
-
- if config.llvm_bin_path.is_none() {
- fatal(None, "missing --llvm-bin-path");
- }
-
- let mut proc_res = compile_test_and_save_ir(config, props, testpaths);
- if !proc_res.status.success() {
- fatal_proc_rec(None, "compilation failed!", &proc_res);
- }
-
- proc_res = check_ir_with_filecheck(config, testpaths);
- if !proc_res.status.success() {
- fatal_proc_rec(None,
- "verification with 'FileCheck' failed",
- &proc_res);
- }
-}
-
-fn charset() -> &'static str {
- // FreeBSD 10.1 defaults to GDB 6.1.1 which doesn't support "auto" charset
- if cfg!(target_os = "bitrig") {
- "auto"
- } else if cfg!(target_os = "freebsd") {
- "ISO-8859-1"
- } else {
- "UTF-8"
- }
-}
-
-fn run_rustdoc_test(config: &Config, props: &TestProps, testpaths: &TestPaths) {
- assert!(props.revisions.is_empty(), "revisions not relevant here");
-
- let out_dir = output_base_name(config, testpaths);
- let _ = fs::remove_dir_all(&out_dir);
- ensure_dir(&out_dir);
-
- let proc_res = document(config, props, testpaths, &out_dir);
- if !proc_res.status.success() {
- fatal_proc_rec(None, "rustdoc failed!", &proc_res);
- }
- let root = find_rust_src_root(config).unwrap();
-
- let res = cmd2procres(config,
- testpaths,
- Command::new(&config.python)
- .arg(root.join("src/etc/htmldocck.py"))
- .arg(out_dir)
- .arg(&testpaths.file));
- if !res.status.success() {
- fatal_proc_rec(None, "htmldocck failed!", &res);
- }
-}
-
-fn run_codegen_units_test(config: &Config, props: &TestProps, testpaths: &TestPaths) {
-
- assert!(props.revisions.is_empty(), "revisions not relevant here");
-
- let proc_res = compile_test(config, props, testpaths);
-
- if !proc_res.status.success() {
- fatal_proc_rec(None, "compilation failed!", &proc_res);
- }
-
- check_no_compiler_crash(None, &proc_res);
-
- const PREFIX: &'static str = "TRANS_ITEM ";
- const CGU_MARKER: &'static str = "@@";
-
- let actual: Vec<TransItem> = proc_res
- .stdout
- .lines()
- .filter(|line| line.starts_with(PREFIX))
- .map(str_to_trans_item)
- .collect();
-
- let expected: Vec<TransItem> = errors::load_errors(&testpaths.file, None)
- .iter()
- .map(|e| str_to_trans_item(&e.msg[..]))
- .collect();
-
- let mut missing = Vec::new();
- let mut wrong_cgus = Vec::new();
-
- for expected_item in &expected {
- let actual_item_with_same_name = actual.iter()
- .find(|ti| ti.name == expected_item.name);
-
- if let Some(actual_item) = actual_item_with_same_name {
- if !expected_item.codegen_units.is_empty() {
- // Also check for codegen units
- if expected_item.codegen_units != actual_item.codegen_units {
- wrong_cgus.push((expected_item.clone(), actual_item.clone()));
- }
- }
- } else {
- missing.push(expected_item.string.clone());
- }
- }
-
- let unexpected: Vec<_> =
- actual.iter()
- .filter(|acgu| !expected.iter().any(|ecgu| acgu.name == ecgu.name))
- .map(|acgu| acgu.string.clone())
- .collect();
-
- if !missing.is_empty() {
- missing.sort();
-
- println!("\nThese items should have been contained but were not:\n");
-
- for item in &missing {
- println!("{}", item);
- }
-
- println!("\n");
- }
-
- if !unexpected.is_empty() {
- let sorted = {
- let mut sorted = unexpected.clone();
- sorted.sort();
- sorted
- };
-
- println!("\nThese items were contained but should not have been:\n");
-
- for item in sorted {
- println!("{}", item);
- }
-
- println!("\n");
- }
-
- if !wrong_cgus.is_empty() {
- wrong_cgus.sort_by_key(|pair| pair.0.name.clone());
- println!("\nThe following items were assigned to wrong codegen units:\n");
-
- for &(ref expected_item, ref actual_item) in &wrong_cgus {
- println!("{}", expected_item.name);
- println!(" expected: {}", codegen_units_to_str(&expected_item.codegen_units));
- println!(" actual: {}", codegen_units_to_str(&actual_item.codegen_units));
- println!("");
- }
- }
-
- if !(missing.is_empty() && unexpected.is_empty() && wrong_cgus.is_empty())
- {
- panic!();
- }
-
- #[derive(Clone, Eq, PartialEq)]
- struct TransItem {
- name: String,
- codegen_units: HashSet<String>,
- string: String,
- }
-
- // [TRANS_ITEM] name [@@ (cgu)+]
- fn str_to_trans_item(s: &str) -> TransItem {
- let s = if s.starts_with(PREFIX) {
- (&s[PREFIX.len()..]).trim()
- } else {
- s.trim()
- };
-
- let full_string = format!("{}{}", PREFIX, s.trim().to_owned());
-
- let parts: Vec<&str> = s.split(CGU_MARKER)
- .map(str::trim)
- .filter(|s| !s.is_empty())
- .collect();
-
- let name = parts[0].trim();
-
- let cgus = if parts.len() > 1 {
- let cgus_str = parts[1];
-
- cgus_str.split(" ")
- .map(str::trim)
- .filter(|s| !s.is_empty())
- .map(str::to_owned)
- .collect()
- }
- else {
- HashSet::new()
- };
-
- TransItem {
- name: name.to_owned(),
- codegen_units: cgus,
- string: full_string,
- }
- }
-
- fn codegen_units_to_str(cgus: &HashSet<String>) -> String
- {
- let mut cgus: Vec<_> = cgus.iter().collect();
- cgus.sort();
-
- let mut string = String::new();
- for cgu in cgus {
- string.push_str(&cgu[..]);
- string.push_str(" ");
- }
-
- string
- }
-}
-
-fn run_incremental_test(config: &Config, props: &TestProps, testpaths: &TestPaths) {
- // Basic plan for a test incremental/foo/bar.rs:
- // - load list of revisions pass1, fail2, pass3
- // - each should begin with `rpass`, `rfail`, or `cfail`
- // - if `rpass`, expect compile and execution to succeed
- // - if `cfail`, expect compilation to fail
- // - if `rfail`, expect execution to fail
- // - create a directory build/foo/bar.incremental
- // - compile foo/bar.rs with -Z incremental=.../foo/bar.incremental and -C pass1
- // - because name of revision starts with "pass", expect success
- // - compile foo/bar.rs with -Z incremental=.../foo/bar.incremental and -C fail2
- // - because name of revision starts with "fail", expect an error
- // - load expected errors as usual, but filter for those that end in `[fail2]`
- // - compile foo/bar.rs with -Z incremental=.../foo/bar.incremental and -C pass3
- // - because name of revision starts with "pass", expect success
- // - execute build/foo/bar.exe and save output
- //
- // FIXME -- use non-incremental mode as an oracle? That doesn't apply
- // to #[rustc_dirty] and clean tests I guess
-
- assert!(!props.revisions.is_empty(), "incremental tests require a list of revisions");
-
- let output_base_name = output_base_name(config, testpaths);
-
- // Create the incremental workproduct directory.
- let incremental_dir = output_base_name.with_extension("incremental");
- if incremental_dir.exists() {
- fs::remove_dir_all(&incremental_dir).unwrap();
- }
- fs::create_dir_all(&incremental_dir).unwrap();
-
- if config.verbose {
- print!("incremental_dir={}", incremental_dir.display());
- }
-
- for revision in &props.revisions {
- let mut revision_props = props.clone();
- header::load_props_into(&mut revision_props, &testpaths.file, Some(&revision));
-
- revision_props.compile_flags.extend(vec![
- format!("-Z"),
- format!("incremental={}", incremental_dir.display()),
- format!("--cfg"),
- format!("{}", revision),
- ]);
-
- if config.verbose {
- print!("revision={:?} revision_props={:#?}", revision, revision_props);
- }
-
- if revision.starts_with("rpass") {
- run_rpass_test_revision(config, &revision_props, testpaths, Some(&revision));
- } else if revision.starts_with("rfail") {
- run_rfail_test_revision(config, &revision_props, testpaths, Some(&revision));
- } else if revision.starts_with("cfail") {
- run_cfail_test_revision(config, &revision_props, testpaths, Some(&revision));
- } else {
- fatal(
- Some(revision),
- "revision name must begin with rpass, rfail, or cfail");
- }
- }
-}
+++ /dev/null
-// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-use std::env;
-use common::Config;
-
-/// Conversion table from triple OS name to Rust SYSNAME
-const OS_TABLE: &'static [(&'static str, &'static str)] = &[
- ("android", "android"),
- ("bitrig", "bitrig"),
- ("darwin", "macos"),
- ("dragonfly", "dragonfly"),
- ("freebsd", "freebsd"),
- ("ios", "ios"),
- ("linux", "linux"),
- ("mingw32", "windows"),
- ("netbsd", "netbsd"),
- ("openbsd", "openbsd"),
- ("win32", "windows"),
- ("windows", "windows"),
- ("solaris", "solaris"),
- ("emscripten", "emscripten"),
-];
-
-const ARCH_TABLE: &'static [(&'static str, &'static str)] = &[
- ("aarch64", "aarch64"),
- ("amd64", "x86_64"),
- ("arm", "arm"),
- ("arm64", "aarch64"),
- ("hexagon", "hexagon"),
- ("i386", "x86"),
- ("i686", "x86"),
- ("mips", "mips"),
- ("msp430", "msp430"),
- ("powerpc", "powerpc"),
- ("powerpc64", "powerpc64"),
- ("s390x", "systemz"),
- ("sparc", "sparc"),
- ("x86_64", "x86_64"),
- ("xcore", "xcore"),
- ("asmjs", "asmjs"),
-];
-
-pub fn get_os(triple: &str) -> &'static str {
- for &(triple_os, os) in OS_TABLE {
- if triple.contains(triple_os) {
- return os
- }
- }
- panic!("Cannot determine OS from triple");
-}
-pub fn get_arch(triple: &str) -> &'static str {
- for &(triple_arch, arch) in ARCH_TABLE {
- if triple.contains(triple_arch) {
- return arch
- }
- }
- panic!("Cannot determine Architecture from triple");
-}
-
-pub fn get_env(triple: &str) -> Option<&str> {
- triple.split('-').nth(3)
-}
-
-pub fn make_new_path(path: &str) -> String {
- assert!(cfg!(windows));
- // Windows just uses PATH as the library search path, so we have to
- // maintain the current value while adding our own
- match env::var(lib_path_env_var()) {
- Ok(curr) => {
- format!("{}{}{}", path, path_div(), curr)
- }
- Err(..) => path.to_owned()
- }
-}
-
-pub fn lib_path_env_var() -> &'static str { "PATH" }
-fn path_div() -> &'static str { ";" }
-
-pub fn logv(config: &Config, s: String) {
- debug!("{}", s);
- if config.verbose { println!("{}", s); }
-}
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Check that method bounds declared on traits/impls in a cross-crate
+// scenario work. This is the library portion of the test.
+
+pub enum MaybeOwned<'a> {
+ Owned(isize),
+ Borrowed(&'a isize)
+}
+
+pub struct Inv<'a> { // invariant w/r/t 'a
+ x: &'a mut &'a isize
+}
+
+// I encountered a bug at some point with encoding the IntoMaybeOwned
+// trait, so I'll use that as the template for this test.
+pub trait IntoMaybeOwned<'a> {
+ fn into_maybe_owned(self) -> MaybeOwned<'a>;
+
+ // Note: without this `into_inv` method, the trait is
+ // contravariant w/r/t `'a`, since if you look strictly at the
+ // interface, it only returns `'a`. This complicates the
+ // downstream test since it wants invariance to force an error.
+ // Hence we add this method.
+ fn into_inv(self) -> Inv<'a>;
+
+ fn bigger_region<'b:'a>(self, b: Inv<'b>);
+}
+
+impl<'a> IntoMaybeOwned<'a> for Inv<'a> {
+ fn into_maybe_owned(self) -> MaybeOwned<'a> { panic!() }
+ fn into_inv(self) -> Inv<'a> { panic!() }
+ fn bigger_region<'b:'a>(self, b: Inv<'b>) { panic!() }
+}
+++ /dev/null
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// Check that method bounds declared on traits/impls in a cross-crate
-// scenario work. This is the library portion of the test.
-
-pub enum MaybeOwned<'a> {
- Owned(isize),
- Borrowed(&'a isize)
-}
-
-pub struct Inv<'a> { // invariant w/r/t 'a
- x: &'a mut &'a isize
-}
-
-// I encountered a bug at some point with encoding the IntoMaybeOwned
-// trait, so I'll use that as the template for this test.
-pub trait IntoMaybeOwned<'a> {
- fn into_maybe_owned(self) -> MaybeOwned<'a>;
-
- // Note: without this `into_inv` method, the trait is
- // contravariant w/r/t `'a`, since if you look strictly at the
- // interface, it only returns `'a`. This complicates the
- // downstream test since it wants invariance to force an error.
- // Hence we add this method.
- fn into_inv(self) -> Inv<'a>;
-
- fn bigger_region<'b:'a>(self, b: Inv<'b>);
-}
-
-impl<'a> IntoMaybeOwned<'a> for Inv<'a> {
- fn into_maybe_owned(self) -> MaybeOwned<'a> { panic!() }
- fn into_inv(self) -> Inv<'a> { panic!() }
- fn bigger_region<'b:'a>(self, b: Inv<'b>) { panic!() }
-}
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(optin_builtin_traits, core)]
+#![crate_type = "rlib"]
+
+pub trait DefaultedTrait { }
+impl DefaultedTrait for .. { }
+
+pub struct Something<T> { t: T }
+++ /dev/null
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-#![feature(optin_builtin_traits, core)]
-#![crate_type = "rlib"]
-
-pub trait DefaultedTrait { }
-impl DefaultedTrait for .. { }
-
-pub struct Something<T> { t: T }
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// aux-build:regions_bounded_method_type_parameters_cross_crate_lib.rs
+// aux-build:rbmtp_cross_crate_lib.rs
// Check explicit region bounds on methods in the cross crate case.
-extern crate regions_bounded_method_type_parameters_cross_crate_lib as lib;
+extern crate rbmtp_cross_crate_lib as lib;
use lib::Inv;
use lib::MaybeOwned;
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// aux-build:typeck_default_trait_impl_cross_crate_coherence_lib.rs
+// aux-build:tdticc_coherence_lib.rs
// Test that we do not consider associated types to be sendable without
// some applicable trait bound (and we don't ICE).
#![feature(optin_builtin_traits)]
-extern crate typeck_default_trait_impl_cross_crate_coherence_lib as lib;
+extern crate tdticc_coherence_lib as lib;
use lib::DefaultedTrait;
--- /dev/null
+[root]
+name = "compiletest"
+version = "0.0.0"
+dependencies = [
+ "env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "aho-corasick"
+version = "0.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "memchr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "env_logger"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "regex 0.1.62 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "libc"
+version = "0.2.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "log"
+version = "0.3.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "memchr"
+version = "0.1.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "libc 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "mempool"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "regex"
+version = "0.1.62"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "aho-corasick 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "memchr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
+ "mempool 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "regex-syntax 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "utf8-ranges 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "regex-syntax"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "utf8-ranges"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
--- /dev/null
+[package]
+authors = ["The Rust Project Developers"]
+name = "compiletest"
+version = "0.0.0"
+build = "build.rs"
+
+# Curiously, this will segfault if compiled with opt-level=3 on 64-bit MSVC when
+# running the compile-fail test suite when a should-fail test panics. But hey if
+# this is removed and it gets past the bots, sounds good to me.
+[profile.release]
+opt-level = 2
+
+[dependencies]
+log = "0.3"
+env_logger = "0.3"
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn main() {
+ println!("cargo:rustc-cfg=cargobuild");
+}
--- /dev/null
+// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+pub use self::Mode::*;
+
+use std::fmt;
+use std::str::FromStr;
+use std::path::PathBuf;
+
+#[derive(Clone, Copy, PartialEq, Debug)]
+pub enum Mode {
+ CompileFail,
+ ParseFail,
+ RunFail,
+ RunPass,
+ RunPassValgrind,
+ Pretty,
+ DebugInfoGdb,
+ DebugInfoLldb,
+ Codegen,
+ Rustdoc,
+ CodegenUnits,
+ Incremental,
+}
+
+impl FromStr for Mode {
+ type Err = ();
+ fn from_str(s: &str) -> Result<Mode, ()> {
+ match s {
+ "compile-fail" => Ok(CompileFail),
+ "parse-fail" => Ok(ParseFail),
+ "run-fail" => Ok(RunFail),
+ "run-pass" => Ok(RunPass),
+ "run-pass-valgrind" => Ok(RunPassValgrind),
+ "pretty" => Ok(Pretty),
+ "debuginfo-lldb" => Ok(DebugInfoLldb),
+ "debuginfo-gdb" => Ok(DebugInfoGdb),
+ "codegen" => Ok(Codegen),
+ "rustdoc" => Ok(Rustdoc),
+ "codegen-units" => Ok(CodegenUnits),
+ "incremental" => Ok(Incremental),
+ _ => Err(()),
+ }
+ }
+}
+
+impl fmt::Display for Mode {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ fmt::Display::fmt(match *self {
+ CompileFail => "compile-fail",
+ ParseFail => "parse-fail",
+ RunFail => "run-fail",
+ RunPass => "run-pass",
+ RunPassValgrind => "run-pass-valgrind",
+ Pretty => "pretty",
+ DebugInfoGdb => "debuginfo-gdb",
+ DebugInfoLldb => "debuginfo-lldb",
+ Codegen => "codegen",
+ Rustdoc => "rustdoc",
+ CodegenUnits => "codegen-units",
+ Incremental => "incremental",
+ }, f)
+ }
+}
+
+#[derive(Clone)]
+pub struct Config {
+ // The library paths required for running the compiler
+ pub compile_lib_path: PathBuf,
+
+ // The library paths required for running compiled programs
+ pub run_lib_path: PathBuf,
+
+ // The rustc executable
+ pub rustc_path: PathBuf,
+
+ // The rustdoc executable
+ pub rustdoc_path: PathBuf,
+
+ // The python executable
+ pub python: String,
+
+ // The llvm FileCheck binary path
+ pub llvm_filecheck: Option<PathBuf>,
+
+ // The valgrind path
+ pub valgrind_path: Option<String>,
+
+ // Whether to fail if we can't run run-pass-valgrind tests under valgrind
+ // (or, alternatively, to silently run them like regular run-pass tests).
+ pub force_valgrind: bool,
+
+ // The directory containing the tests to run
+ pub src_base: PathBuf,
+
+ // The directory where programs should be built
+ pub build_base: PathBuf,
+
+ // Directory for auxiliary libraries
+ pub aux_base: PathBuf,
+
+ // The name of the stage being built (stage1, etc)
+ pub stage_id: String,
+
+ // The test mode, compile-fail, run-fail, run-pass
+ pub mode: Mode,
+
+ // Run ignored tests
+ pub run_ignored: bool,
+
+ // Only run tests that match this filter
+ pub filter: Option<String>,
+
+ // Write out a parseable log of tests that were run
+ pub logfile: Option<PathBuf>,
+
+ // A command line to prefix program execution with,
+ // for running under valgrind
+ pub runtool: Option<String>,
+
+ // Flags to pass to the compiler when building for the host
+ pub host_rustcflags: Option<String>,
+
+ // Flags to pass to the compiler when building for the target
+ pub target_rustcflags: Option<String>,
+
+ // Target system to be tested
+ pub target: String,
+
+ // Host triple for the compiler being invoked
+ pub host: String,
+
+ // Version of GDB
+ pub gdb_version: Option<String>,
+
+ // Version of LLDB
+ pub lldb_version: Option<String>,
+
+ // Path to the android tools
+ pub android_cross_path: PathBuf,
+
+ // Extra parameter to run adb on arm-linux-androideabi
+ pub adb_path: String,
+
+ // Extra parameter to run test suite on arm-linux-androideabi
+ pub adb_test_dir: String,
+
+ // status whether android device available or not
+ pub adb_device_status: bool,
+
+ // the path containing LLDB's Python module
+ pub lldb_python_dir: Option<String>,
+
+ // Explain what's going on
+ pub verbose: bool,
+
+ // Print one character per test instead of one line
+ pub quiet: bool,
+}
--- /dev/null
+// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+use self::WhichLine::*;
+
+use std::fmt;
+use std::fs::File;
+use std::io::BufReader;
+use std::io::prelude::*;
+use std::path::Path;
+use std::str::FromStr;
+
+#[derive(Clone, Debug, PartialEq)]
+pub enum ErrorKind {
+ Help,
+ Error,
+ Note,
+ Suggestion,
+ Warning,
+}
+
+impl FromStr for ErrorKind {
+ type Err = ();
+ fn from_str(s: &str) -> Result<Self, Self::Err> {
+ match &s.trim_right_matches(':') as &str {
+ "HELP" => Ok(ErrorKind::Help),
+ "ERROR" => Ok(ErrorKind::Error),
+ "NOTE" => Ok(ErrorKind::Note),
+ "SUGGESTION" => Ok(ErrorKind::Suggestion),
+ "WARN" => Ok(ErrorKind::Warning),
+ "WARNING" => Ok(ErrorKind::Warning),
+ _ => Err(()),
+ }
+ }
+}
+
+impl fmt::Display for ErrorKind {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match *self {
+ ErrorKind::Help => write!(f, "help"),
+ ErrorKind::Error => write!(f, "error"),
+ ErrorKind::Note => write!(f, "note"),
+ ErrorKind::Suggestion => write!(f, "suggestion"),
+ ErrorKind::Warning => write!(f, "warning"),
+ }
+ }
+}
+
+pub struct ExpectedError {
+ pub line_num: usize,
+ /// What kind of message we expect (e.g. warning, error, suggestion).
+ /// `None` if not specified or unknown message kind.
+ pub kind: Option<ErrorKind>,
+ pub msg: String,
+}
+
+#[derive(PartialEq, Debug)]
+enum WhichLine { ThisLine, FollowPrevious(usize), AdjustBackward(usize) }
+
+/// Looks for either "//~| KIND MESSAGE" or "//~^^... KIND MESSAGE"
+/// The former is a "follow" that inherits its target from the preceding line;
+/// the latter is an "adjusts" that goes that many lines up.
+///
+/// Goal is to enable tests both like: //~^^^ ERROR go up three
+/// and also //~^ ERROR message one for the preceding line, and
+/// //~| ERROR message two for that same line.
+///
+/// If cfg is not None (i.e., in an incremental test), then we look
+/// for `//[X]~` instead, where `X` is the current `cfg`.
+pub fn load_errors(testfile: &Path, cfg: Option<&str>) -> Vec<ExpectedError> {
+ let rdr = BufReader::new(File::open(testfile).unwrap());
+
+ // `last_nonfollow_error` tracks the most recently seen
+ // line with an error template that did not use the
+ // follow-syntax, "//~| ...".
+ //
+ // (pnkfelix could not find an easy way to compose Iterator::scan
+ // and Iterator::filter_map to pass along this information into
+ // `parse_expected`. So instead I am storing that state here and
+ // updating it in the map callback below.)
+ let mut last_nonfollow_error = None;
+
+ let tag = match cfg {
+ Some(rev) => format!("//[{}]~", rev),
+ None => format!("//~")
+ };
+
+ rdr.lines()
+ .enumerate()
+ .filter_map(|(line_num, line)| {
+ parse_expected(last_nonfollow_error,
+ line_num + 1,
+ &line.unwrap(),
+ &tag)
+ .map(|(which, error)| {
+ match which {
+ FollowPrevious(_) => {}
+ _ => last_nonfollow_error = Some(error.line_num),
+ }
+ error
+ })
+ })
+ .collect()
+}
+
+fn parse_expected(last_nonfollow_error: Option<usize>,
+ line_num: usize,
+ line: &str,
+ tag: &str)
+ -> Option<(WhichLine, ExpectedError)> {
+ let start = match line.find(tag) { Some(i) => i, None => return None };
+ let (follow, adjusts) = if line[start + tag.len()..].chars().next().unwrap() == '|' {
+ (true, 0)
+ } else {
+ (false, line[start + tag.len()..].chars().take_while(|c| *c == '^').count())
+ };
+ let kind_start = start + tag.len() + adjusts + (follow as usize);
+ let kind = line[kind_start..].split_whitespace()
+ .next()
+ .expect("Encountered unexpected empty comment")
+ .parse::<ErrorKind>()
+ .ok();
+ let letters = line[kind_start..].chars();
+ let msg = letters.skip_while(|c| c.is_whitespace())
+ .skip_while(|c| !c.is_whitespace())
+ .collect::<String>().trim().to_owned();
+
+ let (which, line_num) = if follow {
+ assert!(adjusts == 0, "use either //~| or //~^, not both.");
+ let line_num = last_nonfollow_error.expect("encountered //~| without \
+ preceding //~^ line.");
+ (FollowPrevious(line_num), line_num)
+ } else {
+ let which =
+ if adjusts > 0 { AdjustBackward(adjusts) } else { ThisLine };
+ let line_num = line_num - adjusts;
+ (which, line_num)
+ };
+
+ debug!("line={} tag={:?} which={:?} kind={:?} msg={:?}",
+ line_num, tag, which, kind, msg);
+ Some((which, ExpectedError { line_num: line_num,
+ kind: kind,
+ msg: msg, }))
+}
--- /dev/null
+// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use std::env;
+use std::fs::File;
+use std::io::BufReader;
+use std::io::prelude::*;
+use std::path::{Path, PathBuf};
+
+use common::Config;
+use common;
+use util;
+
+#[derive(Clone, Debug)]
+pub struct TestProps {
+ // Lines that should be expected, in order, on standard out
+ pub error_patterns: Vec<String> ,
+ // Extra flags to pass to the compiler
+ pub compile_flags: Vec<String>,
+ // Extra flags to pass when the compiled code is run (such as --bench)
+ pub run_flags: Option<String>,
+ // If present, the name of a file that this test should match when
+ // pretty-printed
+ pub pp_exact: Option<PathBuf>,
+ // Modules from aux directory that should be compiled
+ pub aux_builds: Vec<String> ,
+ // Environment settings to use for compiling
+ pub rustc_env: Vec<(String,String)> ,
+ // Environment settings to use during execution
+ pub exec_env: Vec<(String,String)> ,
+ // Lines to check if they appear in the expected debugger output
+ pub check_lines: Vec<String> ,
+ // Build documentation for all specified aux-builds as well
+ pub build_aux_docs: bool,
+ // Flag to force a crate to be built with the host architecture
+ pub force_host: bool,
+ // Check stdout for error-pattern output as well as stderr
+ pub check_stdout: bool,
+ // Don't force a --crate-type=dylib flag on the command line
+ pub no_prefer_dynamic: bool,
+ // Run --pretty expanded when running pretty printing tests
+ pub pretty_expanded: bool,
+ // Which pretty mode are we testing with, default to 'normal'
+ pub pretty_mode: String,
+ // Only compare pretty output and don't try compiling
+ pub pretty_compare_only: bool,
+ // Patterns which must not appear in the output of a cfail test.
+ pub forbid_output: Vec<String>,
+ // Revisions to test for incremental compilation.
+ pub revisions: Vec<String>,
+}
+
+// Load any test directives embedded in the file
+pub fn load_props(testfile: &Path) -> TestProps {
+ let error_patterns = Vec::new();
+ let aux_builds = Vec::new();
+ let exec_env = Vec::new();
+ let run_flags = None;
+ let pp_exact = None;
+ let check_lines = Vec::new();
+ let build_aux_docs = false;
+ let force_host = false;
+ let check_stdout = false;
+ let no_prefer_dynamic = false;
+ let pretty_expanded = false;
+ let pretty_compare_only = false;
+ let forbid_output = Vec::new();
+ let mut props = TestProps {
+ error_patterns: error_patterns,
+ compile_flags: vec![],
+ run_flags: run_flags,
+ pp_exact: pp_exact,
+ aux_builds: aux_builds,
+ revisions: vec![],
+ rustc_env: vec![],
+ exec_env: exec_env,
+ check_lines: check_lines,
+ build_aux_docs: build_aux_docs,
+ force_host: force_host,
+ check_stdout: check_stdout,
+ no_prefer_dynamic: no_prefer_dynamic,
+ pretty_expanded: pretty_expanded,
+ pretty_mode: format!("normal"),
+ pretty_compare_only: pretty_compare_only,
+ forbid_output: forbid_output,
+ };
+ load_props_into(&mut props, testfile, None);
+ props
+}
+
+/// Load properties from `testfile` into `props`. If a property is
+/// tied to a particular revision `foo` (indicated by writing
+/// `//[foo]`), then the property is ignored unless `cfg` is
+/// `Some("foo")`.
+pub fn load_props_into(props: &mut TestProps, testfile: &Path, cfg: Option<&str>) {
+ iter_header(testfile, cfg, &mut |ln| {
+ if let Some(ep) = parse_error_pattern(ln) {
+ props.error_patterns.push(ep);
+ }
+
+ if let Some(flags) = parse_compile_flags(ln) {
+ props.compile_flags.extend(
+ flags
+ .split_whitespace()
+ .map(|s| s.to_owned()));
+ }
+
+ if let Some(r) = parse_revisions(ln) {
+ props.revisions.extend(r);
+ }
+
+ if props.run_flags.is_none() {
+ props.run_flags = parse_run_flags(ln);
+ }
+
+ if props.pp_exact.is_none() {
+ props.pp_exact = parse_pp_exact(ln, testfile);
+ }
+
+ if !props.build_aux_docs {
+ props.build_aux_docs = parse_build_aux_docs(ln);
+ }
+
+ if !props.force_host {
+ props.force_host = parse_force_host(ln);
+ }
+
+ if !props.check_stdout {
+ props.check_stdout = parse_check_stdout(ln);
+ }
+
+ if !props.no_prefer_dynamic {
+ props.no_prefer_dynamic = parse_no_prefer_dynamic(ln);
+ }
+
+ if !props.pretty_expanded {
+ props.pretty_expanded = parse_pretty_expanded(ln);
+ }
+
+ if let Some(m) = parse_pretty_mode(ln) {
+ props.pretty_mode = m;
+ }
+
+ if !props.pretty_compare_only {
+ props.pretty_compare_only = parse_pretty_compare_only(ln);
+ }
+
+ if let Some(ab) = parse_aux_build(ln) {
+ props.aux_builds.push(ab);
+ }
+
+ if let Some(ee) = parse_env(ln, "exec-env") {
+ props.exec_env.push(ee);
+ }
+
+ if let Some(ee) = parse_env(ln, "rustc-env") {
+ props.rustc_env.push(ee);
+ }
+
+ if let Some(cl) = parse_check_line(ln) {
+ props.check_lines.push(cl);
+ }
+
+ if let Some(of) = parse_forbid_output(ln) {
+ props.forbid_output.push(of);
+ }
+ });
+
+ for key in vec!["RUST_TEST_NOCAPTURE", "RUST_TEST_THREADS"] {
+ match env::var(key) {
+ Ok(val) =>
+ if props.exec_env.iter().find(|&&(ref x, _)| *x == key).is_none() {
+ props.exec_env.push((key.to_owned(), val))
+ },
+ Err(..) => {}
+ }
+ }
+}
+
+pub struct EarlyProps {
+ pub ignore: bool,
+ pub should_fail: bool,
+}
+
+// scan the file to detect whether the test should be ignored and
+// whether it should panic; these are two things the test runner needs
+// to know early, before actually running the test
+pub fn early_props(config: &Config, testfile: &Path) -> EarlyProps {
+ let mut props = EarlyProps {
+ ignore: false,
+ should_fail: false,
+ };
+
+ iter_header(testfile, None, &mut |ln| {
+ props.ignore =
+ props.ignore ||
+ parse_name_directive(ln, "ignore-test") ||
+ parse_name_directive(ln, &ignore_target(config)) ||
+ parse_name_directive(ln, &ignore_architecture(config)) ||
+ parse_name_directive(ln, &ignore_stage(config)) ||
+ parse_name_directive(ln, &ignore_env(config)) ||
+ (config.mode == common::Pretty &&
+ parse_name_directive(ln, "ignore-pretty")) ||
+ (config.target != config.host &&
+ parse_name_directive(ln, "ignore-cross-compile")) ||
+ ignore_gdb(config, ln) ||
+ ignore_lldb(config, ln);
+
+ props.should_fail =
+ props.should_fail ||
+ parse_name_directive(ln, "should-fail");
+ });
+
+ return props;
+
+ fn ignore_target(config: &Config) -> String {
+ format!("ignore-{}", util::get_os(&config.target))
+ }
+ fn ignore_architecture(config: &Config) -> String {
+ format!("ignore-{}", util::get_arch(&config.target))
+ }
+ fn ignore_stage(config: &Config) -> String {
+ format!("ignore-{}",
+ config.stage_id.split('-').next().unwrap())
+ }
+ fn ignore_env(config: &Config) -> String {
+ format!("ignore-{}", util::get_env(&config.target).unwrap_or("<unknown>"))
+ }
+ fn ignore_gdb(config: &Config, line: &str) -> bool {
+ if config.mode != common::DebugInfoGdb {
+ return false;
+ }
+
+ if parse_name_directive(line, "ignore-gdb") {
+ return true;
+ }
+
+ if let Some(ref actual_version) = config.gdb_version {
+ if line.contains("min-gdb-version") {
+ let min_version = line.trim()
+ .split(' ')
+ .last()
+ .expect("Malformed GDB version directive");
+            // Ignore if actual version is smaller than the minimum required
+ // version
+ gdb_version_to_int(actual_version) <
+ gdb_version_to_int(min_version)
+ } else {
+ false
+ }
+ } else {
+ false
+ }
+ }
+
+ fn ignore_lldb(config: &Config, line: &str) -> bool {
+ if config.mode != common::DebugInfoLldb {
+ return false;
+ }
+
+ if parse_name_directive(line, "ignore-lldb") {
+ return true;
+ }
+
+ if let Some(ref actual_version) = config.lldb_version {
+ if line.contains("min-lldb-version") {
+ let min_version = line.trim()
+ .split(' ')
+ .last()
+ .expect("Malformed lldb version directive");
+                // Ignore if actual version is smaller than the minimum required
+ // version
+ lldb_version_to_int(actual_version) <
+ lldb_version_to_int(min_version)
+ } else {
+ false
+ }
+ } else {
+ false
+ }
+ }
+}
+
+fn iter_header(testfile: &Path,
+ cfg: Option<&str>,
+ it: &mut FnMut(&str)) {
+ let rdr = BufReader::new(File::open(testfile).unwrap());
+ for ln in rdr.lines() {
+ // Assume that any directives will be found before the first
+ // module or function. This doesn't seem to be an optimization
+ // with a warm page cache. Maybe with a cold one.
+ let ln = ln.unwrap();
+ let ln = ln.trim();
+ if ln.starts_with("fn") || ln.starts_with("mod") {
+ return;
+ } else if ln.starts_with("//[") {
+ // A comment like `//[foo]` is specific to revision `foo`
+ if let Some(close_brace) = ln.find("]") {
+ let lncfg = &ln[3..close_brace];
+ let matches = match cfg {
+ Some(s) => s == &lncfg[..],
+ None => false,
+ };
+ if matches {
+ it(&ln[close_brace+1..]);
+ }
+ } else {
+ panic!("malformed condition directive: expected `//[foo]`, found `{}`",
+ ln)
+ }
+ } else if ln.starts_with("//") {
+ it(&ln[2..]);
+ }
+ }
+ return;
+}
+
+fn parse_error_pattern(line: &str) -> Option<String> {
+ parse_name_value_directive(line, "error-pattern")
+}
+
+fn parse_forbid_output(line: &str) -> Option<String> {
+ parse_name_value_directive(line, "forbid-output")
+}
+
+fn parse_aux_build(line: &str) -> Option<String> {
+ parse_name_value_directive(line, "aux-build")
+}
+
+fn parse_compile_flags(line: &str) -> Option<String> {
+ parse_name_value_directive(line, "compile-flags")
+}
+
+fn parse_revisions(line: &str) -> Option<Vec<String>> {
+ parse_name_value_directive(line, "revisions")
+ .map(|r| r.split_whitespace().map(|t| t.to_string()).collect())
+}
+
+fn parse_run_flags(line: &str) -> Option<String> {
+ parse_name_value_directive(line, "run-flags")
+}
+
+fn parse_check_line(line: &str) -> Option<String> {
+ parse_name_value_directive(line, "check")
+}
+
+fn parse_force_host(line: &str) -> bool {
+ parse_name_directive(line, "force-host")
+}
+
+fn parse_build_aux_docs(line: &str) -> bool {
+ parse_name_directive(line, "build-aux-docs")
+}
+
+fn parse_check_stdout(line: &str) -> bool {
+ parse_name_directive(line, "check-stdout")
+}
+
+fn parse_no_prefer_dynamic(line: &str) -> bool {
+ parse_name_directive(line, "no-prefer-dynamic")
+}
+
+fn parse_pretty_expanded(line: &str) -> bool {
+ parse_name_directive(line, "pretty-expanded")
+}
+
+fn parse_pretty_mode(line: &str) -> Option<String> {
+ parse_name_value_directive(line, "pretty-mode")
+}
+
+fn parse_pretty_compare_only(line: &str) -> bool {
+ parse_name_directive(line, "pretty-compare-only")
+}
+
+fn parse_env(line: &str, name: &str) -> Option<(String, String)> {
+ parse_name_value_directive(line, name).map(|nv| {
+ // nv is either FOO or FOO=BAR
+ let mut strs: Vec<String> = nv
+ .splitn(2, '=')
+ .map(str::to_owned)
+ .collect();
+
+ match strs.len() {
+ 1 => (strs.pop().unwrap(), "".to_owned()),
+ 2 => {
+ let end = strs.pop().unwrap();
+ (strs.pop().unwrap(), end)
+ }
+ n => panic!("Expected 1 or 2 strings, not {}", n)
+ }
+ })
+}
+
+fn parse_pp_exact(line: &str, testfile: &Path) -> Option<PathBuf> {
+ if let Some(s) = parse_name_value_directive(line, "pp-exact") {
+ Some(PathBuf::from(&s))
+ } else {
+ if parse_name_directive(line, "pp-exact") {
+ testfile.file_name().map(PathBuf::from)
+ } else {
+ None
+ }
+ }
+}
+
+fn parse_name_directive(line: &str, directive: &str) -> bool {
+ // This 'no-' rule is a quick hack to allow pretty-expanded and no-pretty-expanded to coexist
+ line.contains(directive) && !line.contains(&("no-".to_owned() + directive))
+}
+
+pub fn parse_name_value_directive(line: &str, directive: &str)
+ -> Option<String> {
+ let keycolon = format!("{}:", directive);
+ if let Some(colon) = line.find(&keycolon) {
+ let value = line[(colon + keycolon.len()) .. line.len()].to_owned();
+ debug!("{}: {}", directive, value);
+ Some(value)
+ } else {
+ None
+ }
+}
+
+pub fn gdb_version_to_int(version_string: &str) -> isize {
+ let error_string = format!(
+ "Encountered GDB version string with unexpected format: {}",
+ version_string);
+ let error_string = error_string;
+
+ let components: Vec<&str> = version_string.trim().split('.').collect();
+
+ if components.len() != 2 {
+ panic!("{}", error_string);
+ }
+
+ let major: isize = components[0].parse().ok().expect(&error_string);
+ let minor: isize = components[1].parse().ok().expect(&error_string);
+
+ return major * 1000 + minor;
+}
+
+pub fn lldb_version_to_int(version_string: &str) -> isize {
+ let error_string = format!(
+ "Encountered LLDB version string with unexpected format: {}",
+ version_string);
+ let error_string = error_string;
+ let major: isize = version_string.parse().ok().expect(&error_string);
+ return major;
+}
--- /dev/null
+// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![crate_name = "compiletest"]
+
+#![feature(box_syntax)]
+#![feature(rustc_private)]
+#![feature(test)]
+#![feature(question_mark)]
+#![feature(libc)]
+
+#![deny(warnings)]
+
+extern crate libc;
+extern crate test;
+extern crate getopts;
+
+#[macro_use]
+extern crate log;
+
+#[cfg(cargobuild)]
+extern crate env_logger;
+
+use std::env;
+use std::fs;
+use std::io;
+use std::path::{Path, PathBuf};
+use getopts::{optopt, optflag, reqopt};
+use common::Config;
+use common::{Pretty, DebugInfoGdb, DebugInfoLldb};
+use test::TestPaths;
+use util::logv;
+
+pub mod procsrv;
+pub mod util;
+pub mod header;
+pub mod runtest;
+pub mod common;
+pub mod errors;
+mod raise_fd_limit;
+
+fn main() {
+ #[cfg(cargobuild)]
+ fn log_init() { env_logger::init().unwrap(); }
+ #[cfg(not(cargobuild))]
+ fn log_init() {}
+ log_init();
+
+ let config = parse_config(env::args().collect());
+
+ if config.valgrind_path.is_none() && config.force_valgrind {
+ panic!("Can't find Valgrind to run Valgrind tests");
+ }
+
+ log_config(&config);
+ run_tests(&config);
+}
+
+pub fn parse_config(args: Vec<String> ) -> Config {
+
+ let groups : Vec<getopts::OptGroup> =
+ vec!(reqopt("", "compile-lib-path", "path to host shared libraries", "PATH"),
+ reqopt("", "run-lib-path", "path to target shared libraries", "PATH"),
+ reqopt("", "rustc-path", "path to rustc to use for compiling", "PATH"),
+ reqopt("", "rustdoc-path", "path to rustdoc to use for compiling", "PATH"),
+ reqopt("", "python", "path to python to use for doc tests", "PATH"),
+ optopt("", "valgrind-path", "path to Valgrind executable for Valgrind tests", "PROGRAM"),
+ optflag("", "force-valgrind", "fail if Valgrind tests cannot be run under Valgrind"),
+ optopt("", "llvm-filecheck", "path to LLVM's FileCheck binary", "DIR"),
+ reqopt("", "src-base", "directory to scan for test files", "PATH"),
+ reqopt("", "build-base", "directory to deposit test outputs", "PATH"),
+ reqopt("", "aux-base", "directory to find auxiliary test files", "PATH"),
+ reqopt("", "stage-id", "the target-stage identifier", "stageN-TARGET"),
+ reqopt("", "mode", "which sort of compile tests to run",
+ "(compile-fail|parse-fail|run-fail|run-pass|\
+ run-pass-valgrind|pretty|debug-info|incremental)"),
+ optflag("", "ignored", "run tests marked as ignored"),
+ optopt("", "runtool", "supervisor program to run tests under \
+ (eg. emulator, valgrind)", "PROGRAM"),
+ optopt("", "host-rustcflags", "flags to pass to rustc for host", "FLAGS"),
+ optopt("", "target-rustcflags", "flags to pass to rustc for target", "FLAGS"),
+ optflag("", "verbose", "run tests verbosely, showing all output"),
+ optflag("", "quiet", "print one character per test instead of one line"),
+ optopt("", "logfile", "file to log test execution to", "FILE"),
+ optopt("", "target", "the target to build for", "TARGET"),
+ optopt("", "host", "the host to build for", "HOST"),
+ optopt("", "gdb-version", "the version of GDB used", "VERSION STRING"),
+ optopt("", "lldb-version", "the version of LLDB used", "VERSION STRING"),
+ optopt("", "android-cross-path", "Android NDK standalone path", "PATH"),
+ optopt("", "adb-path", "path to the android debugger", "PATH"),
+ optopt("", "adb-test-dir", "path to tests for the android debugger", "PATH"),
+ optopt("", "lldb-python-dir", "directory containing LLDB's python module", "PATH"),
+ optflag("h", "help", "show this message"));
+
+ let (argv0, args_) = args.split_first().unwrap();
+ if args.len() == 1 || args[1] == "-h" || args[1] == "--help" {
+ let message = format!("Usage: {} [OPTIONS] [TESTNAME...]", argv0);
+ println!("{}", getopts::usage(&message, &groups));
+ println!("");
+ panic!()
+ }
+
+ let matches =
+ &match getopts::getopts(args_, &groups) {
+ Ok(m) => m,
+ Err(f) => panic!("{:?}", f)
+ };
+
+ if matches.opt_present("h") || matches.opt_present("help") {
+ let message = format!("Usage: {} [OPTIONS] [TESTNAME...]", argv0);
+ println!("{}", getopts::usage(&message, &groups));
+ println!("");
+ panic!()
+ }
+
+ fn opt_path(m: &getopts::Matches, nm: &str) -> PathBuf {
+ match m.opt_str(nm) {
+ Some(s) => PathBuf::from(&s),
+ None => panic!("no option (=path) found for {}", nm),
+ }
+ }
+
+ fn make_absolute(path: PathBuf) -> PathBuf {
+ if path.is_relative() {
+ env::current_dir().unwrap().join(path)
+ } else {
+ path
+ }
+ }
+
+ Config {
+ compile_lib_path: make_absolute(opt_path(matches, "compile-lib-path")),
+ run_lib_path: make_absolute(opt_path(matches, "run-lib-path")),
+ rustc_path: opt_path(matches, "rustc-path"),
+ rustdoc_path: opt_path(matches, "rustdoc-path"),
+ python: matches.opt_str("python").unwrap(),
+ valgrind_path: matches.opt_str("valgrind-path"),
+ force_valgrind: matches.opt_present("force-valgrind"),
+ llvm_filecheck: matches.opt_str("llvm-filecheck").map(|s| PathBuf::from(&s)),
+ src_base: opt_path(matches, "src-base"),
+ build_base: opt_path(matches, "build-base"),
+ aux_base: opt_path(matches, "aux-base"),
+ stage_id: matches.opt_str("stage-id").unwrap(),
+ mode: matches.opt_str("mode").unwrap().parse().ok().expect("invalid mode"),
+ run_ignored: matches.opt_present("ignored"),
+ filter: matches.free.first().cloned(),
+ logfile: matches.opt_str("logfile").map(|s| PathBuf::from(&s)),
+ runtool: matches.opt_str("runtool"),
+ host_rustcflags: matches.opt_str("host-rustcflags"),
+ target_rustcflags: matches.opt_str("target-rustcflags"),
+ target: opt_str2(matches.opt_str("target")),
+ host: opt_str2(matches.opt_str("host")),
+ gdb_version: extract_gdb_version(matches.opt_str("gdb-version")),
+ lldb_version: extract_lldb_version(matches.opt_str("lldb-version")),
+ android_cross_path: opt_path(matches, "android-cross-path"),
+ adb_path: opt_str2(matches.opt_str("adb-path")),
+ adb_test_dir: format!("{}/{}",
+ opt_str2(matches.opt_str("adb-test-dir")),
+ opt_str2(matches.opt_str("target"))),
+ adb_device_status:
+ opt_str2(matches.opt_str("target")).contains("android") &&
+ "(none)" != opt_str2(matches.opt_str("adb-test-dir")) &&
+ !opt_str2(matches.opt_str("adb-test-dir")).is_empty(),
+ lldb_python_dir: matches.opt_str("lldb-python-dir"),
+ verbose: matches.opt_present("verbose"),
+ quiet: matches.opt_present("quiet"),
+ }
+}
+
+/// Dump the parsed `Config`, one field per line, through `logv` (which
+/// only prints when `config.verbose` is set). Purely diagnostic output.
+pub fn log_config(config: &Config) {
+    let c = config;
+    logv(c, format!("configuration:"));
+    logv(c, format!("compile_lib_path: {:?}", config.compile_lib_path));
+    logv(c, format!("run_lib_path: {:?}", config.run_lib_path));
+    logv(c, format!("rustc_path: {:?}", config.rustc_path.display()));
+    logv(c, format!("rustdoc_path: {:?}", config.rustdoc_path.display()));
+    logv(c, format!("src_base: {:?}", config.src_base.display()));
+    logv(c, format!("build_base: {:?}", config.build_base.display()));
+    logv(c, format!("stage_id: {}", config.stage_id));
+    logv(c, format!("mode: {}", config.mode));
+    logv(c, format!("run_ignored: {}", config.run_ignored));
+    // `filter` is an Option<String>; render "(none)" when absent.
+    logv(c, format!("filter: {}",
+                    opt_str(&config.filter
+                                   .as_ref()
+                                   .map(|re| re.to_owned()))));
+    logv(c, format!("runtool: {}", opt_str(&config.runtool)));
+    logv(c, format!("host-rustcflags: {}",
+                    opt_str(&config.host_rustcflags)));
+    logv(c, format!("target-rustcflags: {}",
+                    opt_str(&config.target_rustcflags)));
+    logv(c, format!("target: {}", config.target));
+    logv(c, format!("host: {}", config.host));
+    logv(c, format!("android-cross-path: {:?}",
+                    config.android_cross_path.display()));
+    logv(c, format!("adb_path: {:?}", config.adb_path));
+    logv(c, format!("adb_test_dir: {:?}", config.adb_test_dir));
+    logv(c, format!("adb_device_status: {}",
+                    config.adb_device_status));
+    logv(c, format!("verbose: {}", config.verbose));
+    logv(c, format!("quiet: {}", config.quiet));
+    logv(c, format!("\n"));
+}
+
+/// Borrow the string inside an `Option<String>`, substituting the
+/// literal "(none)" when it is `None`. Used for log formatting above.
+pub fn opt_str<'a>(maybestr: &'a Option<String>) -> &'a str {
+    match *maybestr {
+        None => "(none)",
+        Some(ref s) => s,
+    }
+}
+
+/// Owning variant of `opt_str`: consume an `Option<String>` and return
+/// the contained string, or the sentinel "(none)" when absent. Callers
+/// (e.g. the adb_* config fields) compare against that sentinel.
+pub fn opt_str2(maybestr: Option<String>) -> String {
+    match maybestr {
+        None => "(none)".to_owned(),
+        Some(s) => s,
+    }
+}
+
+/// Collect all tests for the configured suite and hand them to libtest's
+/// console runner. Forces single-threaded execution for Android and LLDB
+/// debug-info runs (see inline comments). Panics when any test fails;
+/// runner I/O errors are printed but not treated as fatal.
+pub fn run_tests(config: &Config) {
+    if config.target.contains("android") {
+        if let DebugInfoGdb = config.mode {
+            println!("{} debug-info test uses tcp 5039 port.\
+                     please reserve it", config.target);
+        }
+
+        // android debug-info test uses remote debugger
+        // so, we test 1 thread at once.
+        // also trying to isolate problems with adb_run_wrapper.sh ilooping
+        env::set_var("RUST_TEST_THREADS","1");
+    }
+
+    match config.mode {
+        DebugInfoLldb => {
+            // Some older versions of LLDB seem to have problems with multiple
+            // instances running in parallel, so only run one test thread at a
+            // time.
+            env::set_var("RUST_TEST_THREADS", "1");
+        }
+        _ => { /* proceed */ }
+    }
+
+    let opts = test_opts(config);
+    let tests = make_tests(config);
+    // sadly osx needs some file descriptor limits raised for running tests in
+    // parallel (especially when we have lots and lots of child processes).
+    // For context, see #8904
+    unsafe { raise_fd_limit::raise_fd_limit(); }
+    // Prevent issue #21352 UAC blocking .exe containing 'patch' etc. on Windows
+    // If #11207 is resolved (adding manifest to .exe) this becomes unnecessary
+    env::set_var("__COMPAT_LAYER", "RunAsInvoker");
+    let res = test::run_tests_console(&opts, tests.into_iter().collect());
+    match res {
+        Ok(true) => {}
+        Ok(false) => panic!("Some tests failed"),
+        Err(e) => {
+            println!("I/O failure during tests: {:?}", e);
+        }
+    }
+}
+
+/// Translate our `Config` into the `test::TestOpts` consumed by libtest.
+/// `RUST_TEST_NOCAPTURE` is honored here: any value other than "0"
+/// enables nocapture.
+pub fn test_opts(config: &Config) -> test::TestOpts {
+    test::TestOpts {
+        filter: config.filter.clone(),
+        run_ignored: config.run_ignored,
+        quiet: config.quiet,
+        logfile: config.logfile.clone(),
+        run_tests: true,
+        bench_benchmarks: true,
+        nocapture: match env::var("RUST_TEST_NOCAPTURE") {
+            Ok(val) => &val != "0",
+            Err(_) => false
+        },
+        color: test::AutoColor,
+    }
+}
+
+/// Walk `config.src_base` recursively and build a libtest descriptor for
+/// every test file found. Panics on any I/O error during the walk.
+pub fn make_tests(config: &Config) -> Vec<test::TestDescAndFn> {
+    debug!("making tests from {:?}",
+           config.src_base.display());
+    let mut tests = Vec::new();
+    collect_tests_from_dir(config,
+                           &config.src_base,
+                           &config.src_base,
+                           &PathBuf::new(),
+                           &mut tests)
+        .unwrap();
+    tests
+}
+
+/// Recursively gather tests under `dir` into `tests`.
+///
+/// * `base` — the suite root (`src_base`), recorded in each `TestPaths`.
+/// * `relative_dir_path` — `dir` relative to `base`, used to mirror the
+///   directory layout under `build_base`.
+///
+/// A directory containing a file named `compiletest-ignore-dir` is
+/// skipped entirely (including its subdirectories).
+///
+/// NOTE(review): `dir` is read twice — once for the ignore-marker scan
+/// and once for the real walk — so this costs two directory listings.
+fn collect_tests_from_dir(config: &Config,
+                          base: &Path,
+                          dir: &Path,
+                          relative_dir_path: &Path,
+                          tests: &mut Vec<test::TestDescAndFn>)
+                          -> io::Result<()> {
+    // Ignore directories that contain a file
+    // `compiletest-ignore-dir`.
+    for file in fs::read_dir(dir)? {
+        let file = file?;
+        if file.file_name() == *"compiletest-ignore-dir" {
+            return Ok(());
+        }
+    }
+
+    let dirs = fs::read_dir(dir)?;
+    for file in dirs {
+        let file = file?;
+        let file_path = file.path();
+        debug!("inspecting file {:?}", file_path.display());
+        if is_test(config, &file_path) {
+            // If we find a test foo/bar.rs, we have to build the
+            // output directory `$build/foo` so we can write
+            // `$build/foo/bar` into it. We do this *now* in this
+            // sequential loop because otherwise, if we do it in the
+            // tests themselves, they race for the privilege of
+            // creating the directories and sometimes fail randomly.
+            let build_dir = config.build_base.join(&relative_dir_path);
+            fs::create_dir_all(&build_dir).unwrap();
+
+            let paths = TestPaths {
+                file: file_path,
+                base: base.to_path_buf(),
+                relative_dir: relative_dir_path.to_path_buf(),
+            };
+            tests.push(make_test(config, &paths))
+        } else if file_path.is_dir() {
+            let relative_file_path = relative_dir_path.join(file.file_name());
+            collect_tests_from_dir(config,
+                                   base,
+                                   &file_path,
+                                   &relative_file_path,
+                                   tests)?;
+        }
+    }
+    Ok(())
+}
+
+/// Decide whether `testfile` names a runnable test: the extension must
+/// be `.rs` (or `.rc`, except in Pretty mode) and the file name must not
+/// start with `.`, `#`, or `~` (hidden files, editor backups/locks).
+pub fn is_test(config: &Config, testfile: &Path) -> bool {
+    // Pretty-printer does not work with .rc files yet
+    let valid_extensions =
+        match config.mode {
+            Pretty => vec!(".rs".to_owned()),
+            _ => vec!(".rc".to_owned(), ".rs".to_owned())
+        };
+    let invalid_prefixes = vec!(".".to_owned(), "#".to_owned(), "~".to_owned());
+    let name = testfile.file_name().unwrap().to_str().unwrap();
+
+    let mut valid = false;
+
+    // Accept if any valid extension matches...
+    for ext in &valid_extensions {
+        if name.ends_with(ext) {
+            valid = true;
+        }
+    }
+
+    // ...then reject if any forbidden prefix matches.
+    for pre in &invalid_prefixes {
+        if name.starts_with(pre) {
+            valid = false;
+        }
+    }
+
+    return valid;
+}
+
+/// Build a libtest `TestDescAndFn` for one test file: read its early
+/// header properties (ignore / should-fail) and wire up the run closure.
+pub fn make_test(config: &Config, testpaths: &TestPaths) -> test::TestDescAndFn {
+    let early_props = header::early_props(config, &testpaths.file);
+
+    // The `should-fail` annotation doesn't apply to pretty tests,
+    // since we run the pretty printer across all tests by default.
+    // If desired, we could add a `should-fail-pretty` annotation.
+    let should_panic = match config.mode {
+        Pretty => test::ShouldPanic::No,
+        _ => if early_props.should_fail {
+            test::ShouldPanic::Yes
+        } else {
+            test::ShouldPanic::No
+        }
+    };
+
+    test::TestDescAndFn {
+        desc: test::TestDesc {
+            name: make_test_name(config, testpaths),
+            ignore: early_props.ignore,
+            should_panic: should_panic,
+        },
+        testfn: make_test_closure(config, testpaths),
+    }
+}
+
+/// Derive the display name libtest shows for a test, e.g.
+/// `[run-pass] run-pass/foo/bar/baz.rs` (mode tag plus suite-relative path).
+pub fn make_test_name(config: &Config, testpaths: &TestPaths) -> test::TestName {
+    // Convert a complete path to something like
+    //
+    // run-pass/foo/bar/baz.rs
+    let path =
+        PathBuf::from(config.mode.to_string())
+        .join(&testpaths.relative_dir)
+        .join(&testpaths.file.file_name().unwrap());
+    test::DynTestName(format!("[{}] {}", config.mode, path.display()))
+}
+
+/// Wrap a test invocation in a boxed closure for libtest. Config and
+/// paths are cloned so the closure is 'static and can run on any thread.
+pub fn make_test_closure(config: &Config, testpaths: &TestPaths) -> test::TestFn {
+    let config = config.clone();
+    let testpaths = testpaths.clone();
+    test::DynTestFn(Box::new(move || {
+        runtest::run(config, &testpaths)
+    }))
+}
+
+/// Pull a GDB version number out of a `gdb --version` line.
+///
+/// Scans for the first substring of the form `D.DD…` (one digit, a dot,
+/// then one or more digits) that is not preceded by another digit, and
+/// returns it, e.g. "7.11" from "GNU gdb (GDB) 7.11". Returns `None`
+/// (after printing a note) if no such substring exists or the input is
+/// absent/blank.
+fn extract_gdb_version(full_version_line: Option<String>) -> Option<String> {
+    match full_version_line {
+        Some(ref full_version_line)
+            if !full_version_line.trim().is_empty() => {
+            let full_version_line = full_version_line.trim();
+
+            // used to be a regex "(^|[^0-9])([0-9]\.[0-9]+)"
+            for (pos, c) in full_version_line.char_indices() {
+                if !c.is_digit(10) {
+                    continue
+                }
+                if pos + 2 >= full_version_line.len() {
+                    continue
+                }
+                if full_version_line[pos + 1..].chars().next().unwrap() != '.' {
+                    continue
+                }
+                if !full_version_line[pos + 2..].chars().next().unwrap().is_digit(10) {
+                    continue
+                }
+                // Reject matches preceded by a digit ("17.4" must not
+                // yield "7.4").
+                if pos > 0 && full_version_line[..pos].chars().next_back()
+                                                      .unwrap().is_digit(10) {
+                    continue
+                }
+                // Extend the match over any further fractional digits.
+                let mut end = pos + 3;
+                while end < full_version_line.len() &&
+                      full_version_line[end..].chars().next()
+                                              .unwrap().is_digit(10) {
+                    end += 1;
+                }
+                return Some(full_version_line[pos..end].to_owned());
+            }
+            println!("Could not extract GDB version from line '{}'",
+                     full_version_line);
+            None
+        },
+        _ => None
+    }
+}
+
+/// Extract the major LLDB version from an `lldb --version` line by
+/// hand-matching the case-insensitive prefix "lldb-" and collecting the
+/// digits that follow. See the inline notes: only the Apple-style
+/// version string is supported.
+fn extract_lldb_version(full_version_line: Option<String>) -> Option<String> {
+    // Extract the major LLDB version from the given version string.
+    // LLDB version strings are different for Apple and non-Apple platforms.
+    // At the moment, this function only supports the Apple variant, which looks
+    // like this:
+    //
+    // LLDB-179.5 (older versions)
+    // lldb-300.2.51 (new versions)
+    //
+    // We are only interested in the major version number, so this function
+    // will return `Some("179")` and `Some("300")` respectively.
+
+    if let Some(ref full_version_line) = full_version_line {
+        if !full_version_line.trim().is_empty() {
+            let full_version_line = full_version_line.trim();
+
+            // Match "lldb-" byte-by-byte, ignoring case on the letters.
+            for (pos, l) in full_version_line.char_indices() {
+                if l != 'l' && l != 'L' { continue }
+                if pos + 5 >= full_version_line.len() { continue }
+                let l = full_version_line[pos + 1..].chars().next().unwrap();
+                if l != 'l' && l != 'L' { continue }
+                let d = full_version_line[pos + 2..].chars().next().unwrap();
+                if d != 'd' && d != 'D' { continue }
+                let b = full_version_line[pos + 3..].chars().next().unwrap();
+                if b != 'b' && b != 'B' { continue }
+                let dash = full_version_line[pos + 4..].chars().next().unwrap();
+                if dash != '-' { continue }
+
+                // Digits after the dash form the major version.
+                let vers = full_version_line[pos + 5..].chars().take_while(|c| {
+                    c.is_digit(10)
+                }).collect::<String>();
+                if !vers.is_empty() { return Some(vers) }
+            }
+            println!("Could not extract LLDB version from line '{}'",
+                     full_version_line);
+        }
+    }
+    None
+}
--- /dev/null
+// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use std::env;
+use std::ffi::OsString;
+use std::io::prelude::*;
+use std::path::PathBuf;
+use std::process::{ExitStatus, Command, Child, Output, Stdio};
+
+/// Prepend `lib_path` (and, if given, `aux_path`) to the platform's
+/// dynamic-library search variable on `cmd`: PATH on Windows,
+/// DYLD_LIBRARY_PATH on macOS, LD_LIBRARY_PATH elsewhere. `lib_path`
+/// ends up first so freshly built libraries win over installed ones.
+fn add_target_env(cmd: &mut Command, lib_path: &str, aux_path: Option<&str>) {
+    // Need to be sure to put both the lib_path and the aux path in the dylib
+    // search path for the child.
+    let var = if cfg!(windows) {
+        "PATH"
+    } else if cfg!(target_os = "macos") {
+        "DYLD_LIBRARY_PATH"
+    } else {
+        "LD_LIBRARY_PATH"
+    };
+    let mut path = env::split_paths(&env::var_os(var).unwrap_or(OsString::new()))
+                       .collect::<Vec<_>>();
+    if let Some(p) = aux_path {
+        path.insert(0, PathBuf::from(p))
+    }
+    path.insert(0, PathBuf::from(lib_path));
+
+    // Add the new dylib search path var
+    let newpath = env::join_paths(&path).unwrap();
+    cmd.env(var, newpath);
+}
+
+/// Outcome of a finished child process: exit status plus captured
+/// stdout/stderr, decoded as UTF-8 (callers `unwrap` the conversion).
+pub struct Result {pub status: ExitStatus, pub out: String, pub err: String}
+
+/// Spawn `prog` with `args`/`env`, the dylib search path extended via
+/// `add_target_env`, optionally feed `input` to its stdin, and wait for
+/// completion. Returns `None` if the spawn itself fails (e.g. missing
+/// binary); panics on stdin-write or wait errors.
+pub fn run(lib_path: &str,
+           prog: &str,
+           aux_path: Option<&str>,
+           args: &[String],
+           env: Vec<(String, String)> ,
+           input: Option<String>) -> Option<Result> {
+
+    let mut cmd = Command::new(prog);
+    cmd.args(args)
+       .stdin(Stdio::piped())
+       .stdout(Stdio::piped())
+       .stderr(Stdio::piped());
+    add_target_env(&mut cmd, lib_path, aux_path);
+    for (key, val) in env {
+        cmd.env(&key, &val);
+    }
+
+    match cmd.spawn() {
+        Ok(mut process) => {
+            if let Some(input) = input {
+                process.stdin.as_mut().unwrap().write_all(input.as_bytes()).unwrap();
+            }
+            let Output { status, stdout, stderr } =
+                process.wait_with_output().unwrap();
+
+            Some(Result {
+                status: status,
+                out: String::from_utf8(stdout).unwrap(),
+                err: String::from_utf8(stderr).unwrap()
+            })
+        },
+        Err(..) => None
+    }
+}
+
+/// Like `run`, but does not wait: the child is returned still running
+/// (used e.g. for the Android gdbserver). `input`, if any, is written to
+/// stdin before returning. Returns `None` if the spawn fails.
+pub fn run_background(lib_path: &str,
+                      prog: &str,
+                      aux_path: Option<&str>,
+                      args: &[String],
+                      env: Vec<(String, String)> ,
+                      input: Option<String>) -> Option<Child> {
+
+    let mut cmd = Command::new(prog);
+    cmd.args(args)
+       .stdin(Stdio::piped())
+       .stdout(Stdio::piped())
+       .stderr(Stdio::piped());
+    add_target_env(&mut cmd, lib_path, aux_path);
+    for (key, val) in env {
+        cmd.env(&key, &val);
+    }
+
+    match cmd.spawn() {
+        Ok(mut process) => {
+            if let Some(input) = input {
+                process.stdin.as_mut().unwrap().write_all(input.as_bytes()).unwrap();
+            }
+
+            Some(process)
+        },
+        Err(..) => None
+    }
+}
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+/// darwin_fd_limit exists to work around an issue where launchctl on Mac OS X
+/// defaults the rlimit maxfiles to 256/unlimited. The default soft limit of 256
+/// ends up being far too low for our multithreaded scheduler testing, depending
+/// on the number of cores available.
+///
+/// This fixes issue #7772.
+#[cfg(any(target_os = "macos", target_os = "ios"))]
+#[allow(non_camel_case_types)]
+pub unsafe fn raise_fd_limit() {
+    use libc;
+    use std::cmp;
+    use std::io;
+    use std::mem::size_of_val;
+    use std::ptr::null_mut;
+
+    // sysctl MIB names for kern.maxfilesperproc, hard-coded here because
+    // libc does not expose them. Values match Darwin's <sys/sysctl.h>.
+    static CTL_KERN: libc::c_int = 1;
+    static KERN_MAXFILESPERPROC: libc::c_int = 29;
+
+    // The strategy here is to fetch the current resource limits, read the
+    // kern.maxfilesperproc sysctl value, and bump the soft resource limit for
+    // maxfiles up to the sysctl value.
+
+    // Fetch the kern.maxfilesperproc value
+    let mut mib: [libc::c_int; 2] = [CTL_KERN, KERN_MAXFILESPERPROC];
+    let mut maxfiles: libc::c_int = 0;
+    let mut size: libc::size_t = size_of_val(&maxfiles) as libc::size_t;
+    if libc::sysctl(&mut mib[0], 2, &mut maxfiles as *mut _ as *mut _, &mut size,
+                    null_mut(), 0) != 0 {
+        let err = io::Error::last_os_error();
+        panic!("raise_fd_limit: error calling sysctl: {}", err);
+    }
+
+    // Fetch the current resource limits
+    let mut rlim = libc::rlimit{rlim_cur: 0, rlim_max: 0};
+    if libc::getrlimit(libc::RLIMIT_NOFILE, &mut rlim) != 0 {
+        let err = io::Error::last_os_error();
+        panic!("raise_fd_limit: error calling getrlimit: {}", err);
+    }
+
+    // Bump the soft limit to the smaller of kern.maxfilesperproc and the hard
+    // limit
+    rlim.rlim_cur = cmp::min(maxfiles as libc::rlim_t, rlim.rlim_max);
+
+    // Set our newly-increased resource limit
+    if libc::setrlimit(libc::RLIMIT_NOFILE, &rlim) != 0 {
+        let err = io::Error::last_os_error();
+        panic!("raise_fd_limit: error calling setrlimit: {}", err);
+    }
+}
+
+// No-op everywhere except Darwin: other platforms' default limits are
+// already adequate for the test suite.
+#[cfg(not(any(target_os = "macos", target_os = "ios")))]
+pub unsafe fn raise_fd_limit() {}
--- /dev/null
+// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use common::Config;
+use common::{CompileFail, ParseFail, Pretty, RunFail, RunPass, RunPassValgrind};
+use common::{Codegen, DebugInfoLldb, DebugInfoGdb, Rustdoc, CodegenUnits};
+use common::{Incremental};
+use errors::{self, ErrorKind};
+use header::TestProps;
+use header;
+use procsrv;
+use test::TestPaths;
+use util::logv;
+
+use std::env;
+use std::collections::HashSet;
+use std::fmt;
+use std::fs::{self, File};
+use std::io::BufReader;
+use std::io::prelude::*;
+use std::net::TcpStream;
+use std::path::{Path, PathBuf, Component};
+use std::process::{Command, Output, ExitStatus};
+
+/// Entry point for one test: verify device availability on Android
+/// targets, load the test's header properties, and dispatch to the
+/// per-mode runner. Panics (failing the test) on any fatal condition.
+pub fn run(config: Config, testpaths: &TestPaths) {
+    match &*config.target {
+
+        "arm-linux-androideabi" | "aarch64-linux-android" => {
+            if !config.adb_device_status {
+                panic!("android device not available");
+            }
+        }
+
+        _=> { }
+    }
+
+    if config.verbose {
+        // We're going to be dumping a lot of info. Start on a new line.
+        print!("\n\n");
+    }
+    debug!("running {:?}", testpaths.file.display());
+    let props = header::load_props(&testpaths.file);
+    debug!("loaded props");
+    // One runner per test mode; each panics on failure.
+    match config.mode {
+        CompileFail => run_cfail_test(&config, &props, &testpaths),
+        ParseFail => run_cfail_test(&config, &props, &testpaths),
+        RunFail => run_rfail_test(&config, &props, &testpaths),
+        RunPass => run_rpass_test(&config, &props, &testpaths),
+        RunPassValgrind => run_valgrind_test(&config, &props, &testpaths),
+        Pretty => run_pretty_test(&config, &props, &testpaths),
+        DebugInfoGdb => run_debuginfo_gdb_test(&config, &props, &testpaths),
+        DebugInfoLldb => run_debuginfo_lldb_test(&config, &props, &testpaths),
+        Codegen => run_codegen_test(&config, &props, &testpaths),
+        Rustdoc => run_rustdoc_test(&config, &props, &testpaths),
+        CodegenUnits => run_codegen_units_test(&config, &props, &testpaths),
+        Incremental => run_incremental_test(&config, &props, &testpaths),
+    }
+}
+
+/// Select which process output error patterns are matched against:
+/// stdout+stderr concatenated when the test header sets `check_stdout`,
+/// otherwise stderr alone.
+fn get_output(props: &TestProps, proc_res: &ProcRes) -> String {
+    if props.check_stdout {
+        format!("{}{}", proc_res.stdout, proc_res.stderr)
+    } else {
+        proc_res.stderr.clone()
+    }
+}
+
+
+/// Run `op` once per revision declared in the test header, or once with
+/// `None` when the test has no revisions. For each revision the props
+/// are re-loaded with that revision active and `--cfg <revision>` is
+/// appended to the compile flags so revision-specific code compiles.
+fn for_each_revision<OP>(config: &Config, props: &TestProps, testpaths: &TestPaths,
+                         mut op: OP)
+    where OP: FnMut(&Config, &TestProps, &TestPaths, Option<&str>)
+{
+    if props.revisions.is_empty() {
+        op(config, props, testpaths, None)
+    } else {
+        for revision in &props.revisions {
+            let mut revision_props = props.clone();
+            header::load_props_into(&mut revision_props,
+                                    &testpaths.file,
+                                    Some(&revision));
+            revision_props.compile_flags.extend(vec![
+                format!("--cfg"),
+                format!("{}", revision),
+            ]);
+            op(config, &revision_props, testpaths, Some(revision));
+        }
+    }
+}
+
+/// compile-fail / parse-fail driver: run the revision-aware body once
+/// per declared revision.
+fn run_cfail_test(config: &Config, props: &TestProps, testpaths: &TestPaths) {
+    for_each_revision(config, props, testpaths, run_cfail_test_revision);
+}
+
+/// One compile-fail run: the compile must fail with the expected status,
+/// and the diagnostics must match either the `//~` expected-error
+/// annotations or the header's error patterns (but not both).
+fn run_cfail_test_revision(config: &Config,
+                           props: &TestProps,
+                           testpaths: &TestPaths,
+                           revision: Option<&str>) {
+    let proc_res = compile_test(config, props, testpaths);
+
+    if proc_res.status.success() {
+        fatal_proc_rec(
+            revision,
+            &format!("{} test compiled successfully!", config.mode)[..],
+            &proc_res);
+    }
+
+    check_correct_failure_status(revision, &proc_res);
+
+    // NOTE(review): unreachable in practice — the success case already
+    // bailed above, and check_correct_failure_status panics on a wrong
+    // exit code.
+    if proc_res.status.success() {
+        fatal(revision, "process did not return an error status");
+    }
+
+    let output_to_check = get_output(props, &proc_res);
+    let expected_errors = errors::load_errors(&testpaths.file, revision);
+    if !expected_errors.is_empty() {
+        if !props.error_patterns.is_empty() {
+            fatal(revision, "both error pattern and expected errors specified");
+        }
+        check_expected_errors(revision, expected_errors, testpaths, &proc_res);
+    } else {
+        check_error_patterns(revision, props, testpaths, &output_to_check, &proc_res);
+    }
+    check_no_compiler_crash(revision, &proc_res);
+    check_forbid_output(revision, props, &output_to_check, &proc_res);
+}
+
+/// run-fail driver: run the revision-aware body once per declared revision.
+fn run_rfail_test(config: &Config, props: &TestProps, testpaths: &TestPaths) {
+    for_each_revision(config, props, testpaths, run_rfail_test_revision);
+}
+
+/// One run-fail run: the test must compile, then fail at runtime with
+/// the expected status (not valgrind's error code) and with output
+/// matching the header's error patterns.
+fn run_rfail_test_revision(config: &Config,
+                           props: &TestProps,
+                           testpaths: &TestPaths,
+                           revision: Option<&str>) {
+    let proc_res = compile_test(config, props, testpaths);
+
+    if !proc_res.status.success() {
+        fatal_proc_rec(revision, "compilation failed!", &proc_res);
+    }
+
+    let proc_res = exec_compiled_test(config, props, testpaths);
+
+    // The value our Makefile configures valgrind to return on failure
+    const VALGRIND_ERR: i32 = 100;
+    if proc_res.status.code() == Some(VALGRIND_ERR) {
+        fatal_proc_rec(revision, "run-fail test isn't valgrind-clean!", &proc_res);
+    }
+
+    let output_to_check = get_output(props, &proc_res);
+    check_correct_failure_status(revision, &proc_res);
+    check_error_patterns(revision, props, testpaths, &output_to_check, &proc_res);
+}
+
+/// Assert that the process exited with code 101 — the code the Rust
+/// runtime uses for panics — and not some other failure (e.g. a signal
+/// or segfault, where `code()` would be `None` or different).
+fn check_correct_failure_status(revision: Option<&str>, proc_res: &ProcRes) {
+    // The value the rust runtime returns on failure
+    const RUST_ERR: i32 = 101;
+    if proc_res.status.code() != Some(RUST_ERR) {
+        fatal_proc_rec(
+            revision,
+            &format!("failure produced the wrong error: {}",
+                     proc_res.status),
+            proc_res);
+    }
+}
+
+/// run-pass driver: run the revision-aware body once per declared revision.
+fn run_rpass_test(config: &Config, props: &TestProps, testpaths: &TestPaths) {
+    for_each_revision(config, props, testpaths, run_rpass_test_revision);
+}
+
+/// One run-pass run: the test must both compile and execute successfully.
+fn run_rpass_test_revision(config: &Config,
+                           props: &TestProps,
+                           testpaths: &TestPaths,
+                           revision: Option<&str>) {
+    let proc_res = compile_test(config, props, testpaths);
+
+    if !proc_res.status.success() {
+        fatal_proc_rec(revision, "compilation failed!", &proc_res);
+    }
+
+    let proc_res = exec_compiled_test(config, props, testpaths);
+
+    if !proc_res.status.success() {
+        fatal_proc_rec(revision, "test run failed!", &proc_res);
+    }
+}
+
+/// run-pass-valgrind: like run-pass, but the compiled binary is executed
+/// under valgrind by swapping it in as the runtool. Falls back to a
+/// plain run-pass run when no valgrind path is configured (only legal
+/// when `--force-valgrind` was not given).
+fn run_valgrind_test(config: &Config, props: &TestProps, testpaths: &TestPaths) {
+    assert!(props.revisions.is_empty(), "revisions not relevant here");
+
+    if config.valgrind_path.is_none() {
+        assert!(!config.force_valgrind);
+        return run_rpass_test(config, props, testpaths);
+    }
+
+    let mut proc_res = compile_test(config, props, testpaths);
+
+    if !proc_res.status.success() {
+        fatal_proc_rec(None, "compilation failed!", &proc_res);
+    }
+
+    // Run under valgrind by using it as the runtool wrapper.
+    let mut new_config = config.clone();
+    new_config.runtool = new_config.valgrind_path.clone();
+    proc_res = exec_compiled_test(&new_config, props, testpaths);
+
+    if !proc_res.status.success() {
+        fatal_proc_rec(None, "test run failed!", &proc_res);
+    }
+}
+
+/// pretty driver: run the revision-aware body once per declared revision.
+fn run_pretty_test(config: &Config, props: &TestProps, testpaths: &TestPaths) {
+    for_each_revision(config, props, testpaths, run_pretty_test_revision);
+}
+
+/// One pretty-printing run. Two flavors, chosen by the `pp-exact` header:
+///
+/// * exact — pretty-print once and compare against a reference file
+///   (line endings normalized);
+/// * converging — pretty-print twice and require the second round to
+///   reproduce the first (a fixed point).
+///
+/// Unless `pretty-compare-only` is set, the pretty-printed source is
+/// then type-checked; with `pretty-expanded` the `--unpretty expanded`
+/// output is type-checked as well. Helpers are nested below the early
+/// `return` since they are used nowhere else.
+fn run_pretty_test_revision(config: &Config,
+                            props: &TestProps,
+                            testpaths: &TestPaths,
+                            revision: Option<&str>) {
+    if props.pp_exact.is_some() {
+        logv(config, "testing for exact pretty-printing".to_owned());
+    } else {
+        logv(config, "testing for converging pretty-printing".to_owned());
+    }
+
+    let rounds =
+        match props.pp_exact { Some(_) => 1, None => 2 };
+
+    let mut src = String::new();
+    File::open(&testpaths.file).unwrap().read_to_string(&mut src).unwrap();
+    let mut srcs = vec!(src);
+
+    // Each round pretty-prints the previous round's output.
+    let mut round = 0;
+    while round < rounds {
+        logv(config, format!("pretty-printing round {} revision {:?}",
+                             round, revision));
+        let proc_res = print_source(config,
+                                    props,
+                                    testpaths,
+                                    srcs[round].to_owned(),
+                                    &props.pretty_mode);
+
+        if !proc_res.status.success() {
+            fatal_proc_rec(revision,
+                           &format!("pretty-printing failed in round {} revision {:?}",
+                                    round, revision),
+                           &proc_res);
+        }
+
+        let ProcRes{ stdout, .. } = proc_res;
+        srcs.push(stdout);
+        round += 1;
+    }
+
+    let mut expected = match props.pp_exact {
+        Some(ref file) => {
+            // pp-exact names a reference file next to the test.
+            let filepath = testpaths.file.parent().unwrap().join(file);
+            let mut s = String::new();
+            File::open(&filepath).unwrap().read_to_string(&mut s).unwrap();
+            s
+        }
+        None => { srcs[srcs.len() - 2].clone() }
+    };
+    let mut actual = srcs[srcs.len() - 1].clone();
+
+    if props.pp_exact.is_some() {
+        // Now we have to care about line endings
+        let cr = "\r".to_owned();
+        actual = actual.replace(&cr, "").to_owned();
+        expected = expected.replace(&cr, "").to_owned();
+    }
+
+    compare_source(revision, &expected, &actual);
+
+    // If we're only making sure that the output matches then just stop here
+    if props.pretty_compare_only { return; }
+
+    // Finally, let's make sure it actually appears to remain valid code
+    let proc_res = typecheck_source(config, props, testpaths, actual);
+    if !proc_res.status.success() {
+        fatal_proc_rec(revision, "pretty-printed source does not typecheck", &proc_res);
+    }
+
+    if !props.pretty_expanded { return }
+
+    // additionally, run `--pretty expanded` and try to build it.
+    let proc_res = print_source(config, props, testpaths, srcs[round].clone(), "expanded");
+    if !proc_res.status.success() {
+        fatal_proc_rec(revision, "pretty-printing (expanded) failed", &proc_res);
+    }
+
+    let ProcRes{ stdout: expanded_src, .. } = proc_res;
+    let proc_res = typecheck_source(config, props, testpaths, expanded_src);
+    if !proc_res.status.success() {
+        fatal_proc_rec(
+            revision,
+            "pretty-printed source (expanded) does not typecheck",
+            &proc_res);
+    }
+
+    return;
+
+    // Pretty-print `src` by piping it to rustc's stdin with `--unpretty`.
+    fn print_source(config: &Config,
+                    props: &TestProps,
+                    testpaths: &TestPaths,
+                    src: String,
+                    pretty_type: &str) -> ProcRes {
+        let aux_dir = aux_output_dir_name(config, testpaths);
+        compose_and_run(config,
+                        testpaths,
+                        make_pp_args(config,
+                                     props,
+                                     testpaths,
+                                     pretty_type.to_owned()),
+                        props.exec_env.clone(),
+                        config.compile_lib_path.to_str().unwrap(),
+                        Some(aux_dir.to_str().unwrap()),
+                        Some(src))
+    }
+
+    // Build the rustc invocation for a pretty-printing pass ("-" = stdin).
+    fn make_pp_args(config: &Config,
+                    props: &TestProps,
+                    testpaths: &TestPaths,
+                    pretty_type: String) -> ProcArgs {
+        let aux_dir = aux_output_dir_name(config, testpaths);
+        // FIXME (#9639): This needs to handle non-utf8 paths
+        let mut args = vec!("-".to_owned(),
+                            "-Zunstable-options".to_owned(),
+                            "--unpretty".to_owned(),
+                            pretty_type,
+                            format!("--target={}", config.target),
+                            "-L".to_owned(),
+                            aux_dir.to_str().unwrap().to_owned());
+        args.extend(split_maybe_args(&config.target_rustcflags));
+        args.extend(props.compile_flags.iter().cloned());
+        return ProcArgs {
+            prog: config.rustc_path.to_str().unwrap().to_owned(),
+            args: args,
+        };
+    }
+
+    // Panic with a side-by-side dump when expected/actual source differ.
+    fn compare_source(revision: Option<&str>, expected: &str, actual: &str) {
+        if expected != actual {
+            error(revision, "pretty-printed source does not match expected source");
+            println!("\n\
+expected:\n\
+------------------------------------------\n\
+{}\n\
+------------------------------------------\n\
+actual:\n\
+------------------------------------------\n\
+{}\n\
+------------------------------------------\n\
+\n",
+                     expected, actual);
+            panic!();
+        }
+    }
+
+    // Type-check pretty-printed source without generating code (-Zno-trans).
+    fn typecheck_source(config: &Config, props: &TestProps,
+                        testpaths: &TestPaths, src: String) -> ProcRes {
+        let args = make_typecheck_args(config, props, testpaths);
+        compose_and_run_compiler(config, props, testpaths, args, Some(src))
+    }
+
+    fn make_typecheck_args(config: &Config, props: &TestProps, testpaths: &TestPaths) -> ProcArgs {
+        let aux_dir = aux_output_dir_name(config, testpaths);
+        // force-host tests type-check for the host, not the target.
+        let target = if props.force_host {
+            &*config.host
+        } else {
+            &*config.target
+        };
+        // FIXME (#9639): This needs to handle non-utf8 paths
+        let mut args = vec!("-".to_owned(),
+                            "-Zno-trans".to_owned(),
+                            format!("--target={}", target),
+                            "-L".to_owned(),
+                            config.build_base.to_str().unwrap().to_owned(),
+                            "-L".to_owned(),
+                            aux_dir.to_str().unwrap().to_owned());
+        args.extend(split_maybe_args(&config.target_rustcflags));
+        args.extend(props.compile_flags.iter().cloned());
+        // FIXME (#9639): This needs to handle non-utf8 paths
+        return ProcArgs {
+            prog: config.rustc_path.to_str().unwrap().to_owned(),
+            args: args,
+        };
+    }
+}
+
+fn run_debuginfo_gdb_test(config: &Config, props: &TestProps, testpaths: &TestPaths) {
+ assert!(props.revisions.is_empty(), "revisions not relevant here");
+
+ let mut config = Config {
+ target_rustcflags: cleanup_debug_info_options(&config.target_rustcflags),
+ host_rustcflags: cleanup_debug_info_options(&config.host_rustcflags),
+ .. config.clone()
+ };
+
+ let config = &mut config;
+ let DebuggerCommands {
+ commands,
+ check_lines,
+ breakpoint_lines
+ } = parse_debugger_commands(testpaths, "gdb");
+ let mut cmds = commands.join("\n");
+
+ // compile test file (it should have 'compile-flags:-g' in the header)
+ let compiler_run_result = compile_test(config, props, testpaths);
+ if !compiler_run_result.status.success() {
+ fatal_proc_rec(None, "compilation failed!", &compiler_run_result);
+ }
+
+ let exe_file = make_exe_name(config, testpaths);
+
+ let debugger_run_result;
+ match &*config.target {
+ "arm-linux-androideabi" | "aarch64-linux-android" => {
+
+ cmds = cmds.replace("run", "continue");
+
+ // write debugger script
+ let mut script_str = String::with_capacity(2048);
+ script_str.push_str(&format!("set charset {}\n", charset()));
+ script_str.push_str(&format!("file {}\n", exe_file.to_str().unwrap()));
+ script_str.push_str("target remote :5039\n");
+ script_str.push_str(&format!("set solib-search-path \
+ ./{}/stage2/lib/rustlib/{}/lib/\n",
+ config.host, config.target));
+ for line in &breakpoint_lines {
+ script_str.push_str(&format!("break {:?}:{}\n",
+ testpaths.file
+ .file_name()
+ .unwrap()
+ .to_string_lossy(),
+ *line)[..]);
+ }
+ script_str.push_str(&cmds);
+ script_str.push_str("\nquit\n");
+
+ debug!("script_str = {}", script_str);
+ dump_output_file(config,
+ testpaths,
+ &script_str,
+ "debugger.script");
+
+
+ procsrv::run("",
+ &config.adb_path,
+ None,
+ &[
+ "push".to_owned(),
+ exe_file.to_str().unwrap().to_owned(),
+ config.adb_test_dir.clone()
+ ],
+ vec!(("".to_owned(), "".to_owned())),
+ Some("".to_owned()))
+ .expect(&format!("failed to exec `{:?}`", config.adb_path));
+
+ procsrv::run("",
+ &config.adb_path,
+ None,
+ &[
+ "forward".to_owned(),
+ "tcp:5039".to_owned(),
+ "tcp:5039".to_owned()
+ ],
+ vec!(("".to_owned(), "".to_owned())),
+ Some("".to_owned()))
+ .expect(&format!("failed to exec `{:?}`", config.adb_path));
+
+ let adb_arg = format!("export LD_LIBRARY_PATH={}; \
+ gdbserver{} :5039 {}/{}",
+ config.adb_test_dir.clone(),
+ if config.target.contains("aarch64")
+ {"64"} else {""},
+ config.adb_test_dir.clone(),
+ exe_file.file_name().unwrap().to_str()
+ .unwrap());
+
+ let mut process = procsrv::run_background("",
+ &config.adb_path
+ ,
+ None,
+ &[
+ "shell".to_owned(),
+ adb_arg.clone()
+ ],
+ vec!(("".to_owned(),
+ "".to_owned())),
+ Some("".to_owned()))
+ .expect(&format!("failed to exec `{:?}`", config.adb_path));
+ loop {
+ //waiting 1 second for gdbserver start
+ ::std::thread::sleep(::std::time::Duration::new(1,0));
+ if TcpStream::connect("127.0.0.1:5039").is_ok() {
+ break
+ }
+ }
+
+ let tool_path = match config.android_cross_path.to_str() {
+ Some(x) => x.to_owned(),
+ None => fatal(None, "cannot find android cross path")
+ };
+
+ let debugger_script = make_out_name(config, testpaths, "debugger.script");
+ // FIXME (#9639): This needs to handle non-utf8 paths
+ let debugger_opts =
+ vec!("-quiet".to_owned(),
+ "-batch".to_owned(),
+ "-nx".to_owned(),
+ format!("-command={}", debugger_script.to_str().unwrap()));
+
+ let mut gdb_path = tool_path;
+ gdb_path.push_str(&format!("/bin/{}-gdb", config.target));
+ let procsrv::Result {
+ out,
+ err,
+ status
+ } = procsrv::run("",
+ &gdb_path,
+ None,
+ &debugger_opts,
+ vec!(("".to_owned(), "".to_owned())),
+ None)
+ .expect(&format!("failed to exec `{:?}`", gdb_path));
+ let cmdline = {
+ let cmdline = make_cmdline("",
+ &format!("{}-gdb", config.target),
+ &debugger_opts);
+ logv(config, format!("executing {}", cmdline));
+ cmdline
+ };
+
+ debugger_run_result = ProcRes {
+ status: Status::Normal(status),
+ stdout: out,
+ stderr: err,
+ cmdline: cmdline
+ };
+ if process.kill().is_err() {
+ println!("Adb process is already finished.");
+ }
+ }
+
+ _=> {
+ let rust_src_root = find_rust_src_root(config)
+ .expect("Could not find Rust source root");
+ let rust_pp_module_rel_path = Path::new("./src/etc");
+ let rust_pp_module_abs_path = rust_src_root.join(rust_pp_module_rel_path)
+ .to_str()
+ .unwrap()
+ .to_owned();
+ // write debugger script
+ let mut script_str = String::with_capacity(2048);
+ script_str.push_str(&format!("set charset {}\n", charset()));
+ script_str.push_str("show version\n");
+
+ match config.gdb_version {
+ Some(ref version) => {
+ println!("NOTE: compiletest thinks it is using GDB version {}",
+ version);
+
+ if header::gdb_version_to_int(version) >
+ header::gdb_version_to_int("7.4") {
+ // Add the directory containing the pretty printers to
+ // GDB's script auto loading safe path
+ script_str.push_str(
+ &format!("add-auto-load-safe-path {}\n",
+ rust_pp_module_abs_path.replace(r"\", r"\\"))
+ );
+ }
+ }
+ _ => {
+ println!("NOTE: compiletest does not know which version of \
+ GDB it is using");
+ }
+ }
+
+ // The following line actually doesn't have to do anything with
+ // pretty printing, it just tells GDB to print values on one line:
+ script_str.push_str("set print pretty off\n");
+
+ // Add the pretty printer directory to GDB's source-file search path
+ script_str.push_str(&format!("directory {}\n",
+ rust_pp_module_abs_path));
+
+ // Load the target executable
+ script_str.push_str(&format!("file {}\n",
+ exe_file.to_str().unwrap()
+ .replace(r"\", r"\\")));
+
+ // Add line breakpoints
+ for line in &breakpoint_lines {
+ script_str.push_str(&format!("break '{}':{}\n",
+ testpaths.file.file_name().unwrap()
+ .to_string_lossy(),
+ *line));
+ }
+
+ script_str.push_str(&cmds);
+ script_str.push_str("\nquit\n");
+
+ debug!("script_str = {}", script_str);
+ dump_output_file(config,
+ testpaths,
+ &script_str,
+ "debugger.script");
+
+ // run debugger script with gdb
+ fn debugger() -> &'static str {
+ if cfg!(windows) {"gdb.exe"} else {"gdb"}
+ }
+
+ let debugger_script = make_out_name(config, testpaths, "debugger.script");
+
+ // FIXME (#9639): This needs to handle non-utf8 paths
+ let debugger_opts =
+ vec!("-quiet".to_owned(),
+ "-batch".to_owned(),
+ "-nx".to_owned(),
+ format!("-command={}", debugger_script.to_str().unwrap()));
+
+ let proc_args = ProcArgs {
+ prog: debugger().to_owned(),
+ args: debugger_opts,
+ };
+
+ let environment = vec![("PYTHONPATH".to_owned(), rust_pp_module_abs_path)];
+
+ debugger_run_result = compose_and_run(config,
+ testpaths,
+ proc_args,
+ environment,
+ config.run_lib_path.to_str().unwrap(),
+ None,
+ None);
+ }
+ }
+
+ if !debugger_run_result.status.success() {
+ fatal(None, "gdb failed to execute");
+ }
+
+ check_debugger_output(&debugger_run_result, &check_lines);
+}
+
+/// Walks up from `config.src_base` looking for the directory containing
+/// `src/etc/lldb_batchmode.py`, i.e. the root of the Rust source tree.
+/// Returns `None` if no ancestor matches.
+fn find_rust_src_root(config: &Config) -> Option<PathBuf> {
+    let mut path = config.src_base.clone();
+    let path_postfix = Path::new("src/etc/lldb_batchmode.py");
+
+    while path.pop() {
+        if path.join(&path_postfix).is_file() {
+            return Some(path);
+        }
+    }
+
+    return None;
+}
+
+/// Runs a single debuginfo-lldb test: compiles the test file, builds an
+/// LLDB command script from the file's `lldb-command`/`lldb-check`/`#break`
+/// directives, drives LLDB through `lldb_batchmode.py`, and checks the
+/// captured output against the check lines. Aborts (panics) on failure.
+fn run_debuginfo_lldb_test(config: &Config, props: &TestProps, testpaths: &TestPaths) {
+    assert!(props.revisions.is_empty(), "revisions not relevant here");
+
+    if config.lldb_python_dir.is_none() {
+        fatal(None, "Can't run LLDB test because LLDB's python path is not set.");
+    }
+
+    // Work on a local copy of the config with -O/-g/--debuginfo stripped from
+    // the rustc flags, so RUSTFLAGS-injected duplicates can't interfere.
+    let mut config = Config {
+        target_rustcflags: cleanup_debug_info_options(&config.target_rustcflags),
+        host_rustcflags: cleanup_debug_info_options(&config.host_rustcflags),
+        .. config.clone()
+    };
+
+    let config = &mut config;
+
+    // compile test file (it should have 'compile-flags:-g' in the header)
+    let compile_result = compile_test(config, props, testpaths);
+    if !compile_result.status.success() {
+        fatal_proc_rec(None, "compilation failed!", &compile_result);
+    }
+
+    let exe_file = make_exe_name(config, testpaths);
+
+    match config.lldb_version {
+        Some(ref version) => {
+            println!("NOTE: compiletest thinks it is using LLDB version {}",
+                     version);
+        }
+        _ => {
+            println!("NOTE: compiletest does not know which version of \
+                      LLDB it is using");
+        }
+    }
+
+    // Parse debugger commands etc from test files
+    let DebuggerCommands {
+        commands,
+        check_lines,
+        breakpoint_lines,
+        ..
+    } = parse_debugger_commands(testpaths, "lldb");
+
+    // Write debugger script:
+    // We don't want to hang when calling `quit` while the process is still running
+    let mut script_str = String::from("settings set auto-confirm true\n");
+
+    // Make LLDB emit its version, so we have it documented in the test output
+    script_str.push_str("version\n");
+
+    // Switch LLDB into "Rust mode"
+    let rust_src_root = find_rust_src_root(config)
+        .expect("Could not find Rust source root");
+    let rust_pp_module_rel_path = Path::new("./src/etc/lldb_rust_formatters.py");
+    let rust_pp_module_abs_path = rust_src_root.join(rust_pp_module_rel_path)
+                                               .to_str()
+                                               .unwrap()
+                                               .to_owned();
+
+    script_str.push_str(&format!("command script import {}\n",
+                                 &rust_pp_module_abs_path[..])[..]);
+    script_str.push_str("type summary add --no-value ");
+    script_str.push_str("--python-function lldb_rust_formatters.print_val ");
+    script_str.push_str("-x \".*\" --category Rust\n");
+    script_str.push_str("type category enable Rust\n");
+
+    // Set breakpoints on every line that contains the string "#break"
+    let source_file_name = testpaths.file.file_name().unwrap().to_string_lossy();
+    for line in &breakpoint_lines {
+        script_str.push_str(&format!("breakpoint set --file '{}' --line {}\n",
+                                     source_file_name,
+                                     line));
+    }
+
+    // Append the other commands
+    for line in &commands {
+        script_str.push_str(line);
+        script_str.push_str("\n");
+    }
+
+    // Finally, quit the debugger
+    script_str.push_str("\nquit\n");
+
+    // Write the script into a file
+    debug!("script_str = {}", script_str);
+    dump_output_file(config,
+                     testpaths,
+                     &script_str,
+                     "debugger.script");
+    let debugger_script = make_out_name(config, testpaths, "debugger.script");
+
+    // Let LLDB execute the script via lldb_batchmode.py
+    let debugger_run_result = run_lldb(config,
+                                       testpaths,
+                                       &exe_file,
+                                       &debugger_script,
+                                       &rust_src_root);
+
+    if !debugger_run_result.status.success() {
+        fatal_proc_rec(None, "Error while running LLDB", &debugger_run_result);
+    }
+
+    check_debugger_output(&debugger_run_result, &check_lines);
+
+    // Invokes lldb_batchmode.py under the configured python interpreter,
+    // pointing LLDB's python at `config.lldb_python_dir` via PYTHONPATH.
+    fn run_lldb(config: &Config,
+                testpaths: &TestPaths,
+                test_executable: &Path,
+                debugger_script: &Path,
+                rust_src_root: &Path)
+                -> ProcRes {
+        // Prepare the lldb_batchmode which executes the debugger script
+        let lldb_script_path = rust_src_root.join("src/etc/lldb_batchmode.py");
+        cmd2procres(config,
+                    testpaths,
+                    Command::new(&config.python)
+                            .arg(&lldb_script_path)
+                            .arg(test_executable)
+                            .arg(debugger_script)
+                            .env("PYTHONPATH",
+                                 config.lldb_python_dir.as_ref().unwrap()))
+    }
+}
+
+/// Runs `cmd`, capturing stdout/stderr (must be valid UTF-8), dumps the
+/// output to the test's .out/.err files, and wraps the result in a
+/// `ProcRes`. Aborts the test run if the process cannot be spawned.
+fn cmd2procres(config: &Config, testpaths: &TestPaths, cmd: &mut Command)
+               -> ProcRes {
+    let (status, out, err) = match cmd.output() {
+        Ok(Output { status, stdout, stderr }) => {
+            (status,
+             String::from_utf8(stdout).unwrap(),
+             String::from_utf8(stderr).unwrap())
+        },
+        Err(e) => {
+            fatal(None, &format!("Failed to setup Python process for \
+                                  LLDB script: {}", e))
+        }
+    };
+
+    dump_output(config, testpaths, &out, &err);
+    ProcRes {
+        status: Status::Normal(status),
+        stdout: out,
+        stderr: err,
+        cmdline: format!("{:?}", cmd)
+    }
+}
+
+/// Directives extracted from a debuginfo test file by
+/// `parse_debugger_commands`.
+struct DebuggerCommands {
+    // Debugger commands to run, in file order.
+    commands: Vec<String>,
+    // Expected output lines, in the order they must appear.
+    check_lines: Vec<String>,
+    // 1-based line numbers of source lines containing "#break".
+    breakpoint_lines: Vec<usize>,
+}
+
+/// Scans the test file for `<prefix>-command` and `<prefix>-check`
+/// directives (e.g. `lldb-command`) and for lines containing "#break",
+/// collecting them into a `DebuggerCommands`. Line numbers are 1-based.
+fn parse_debugger_commands(testpaths: &TestPaths, debugger_prefix: &str)
+                           -> DebuggerCommands {
+    let command_directive = format!("{}-command", debugger_prefix);
+    let check_directive = format!("{}-check", debugger_prefix);
+
+    let mut breakpoint_lines = vec!();
+    let mut commands = vec!();
+    let mut check_lines = vec!();
+    // Tracks the current 1-based line number.
+    let mut counter = 1;
+    let reader = BufReader::new(File::open(&testpaths.file).unwrap());
+    for line in reader.lines() {
+        match line {
+            Ok(line) => {
+                if line.contains("#break") {
+                    breakpoint_lines.push(counter);
+                }
+
+                header::parse_name_value_directive(
+                    &line,
+                    &command_directive).map(|cmd| {
+                        commands.push(cmd)
+                    });
+
+                header::parse_name_value_directive(
+                    &line,
+                    &check_directive).map(|cmd| {
+                        check_lines.push(cmd)
+                    });
+            }
+            Err(e) => {
+                fatal(None, &format!("Error while parsing debugger commands: {}", e))
+            }
+        }
+        counter += 1;
+    }
+
+    DebuggerCommands {
+        commands: commands,
+        check_lines: check_lines,
+        breakpoint_lines: breakpoint_lines,
+    }
+}
+
+/// Strips `-O`, `-g`, and `--debuginfo` from a whitespace-separated rustc
+/// flag string, so debuginfo tests control optimization/debuginfo
+/// themselves. `None` passes through unchanged.
+fn cleanup_debug_info_options(options: &Option<String>) -> Option<String> {
+    if options.is_none() {
+        return None;
+    }
+
+    // Remove options that are either unwanted (-O) or may lead to duplicates due to RUSTFLAGS.
+    let options_to_remove = [
+        "-O".to_owned(),
+        "-g".to_owned(),
+        "--debuginfo".to_owned()
+    ];
+    let new_options =
+        split_maybe_args(options).into_iter()
+                                 .filter(|x| !options_to_remove.contains(x))
+                                 .collect::<Vec<String>>();
+
+    Some(new_options.join(" "))
+}
+
+/// Verifies that every entry of `check_lines` appears, in order, in the
+/// debugger's stdout. A check line may contain "[...]" wildcards; the
+/// first fragment must match at the start of the trimmed output line and
+/// the last fragment must end it. Aborts the test on the first miss.
+fn check_debugger_output(debugger_run_result: &ProcRes, check_lines: &[String]) {
+    let num_check_lines = check_lines.len();
+    if num_check_lines > 0 {
+        // Allow check lines to leave parts unspecified (e.g., uninitialized
+        // bits in the wrong case of an enum) with the notation "[...]".
+        let check_fragments: Vec<Vec<String>> =
+            check_lines.iter().map(|s| {
+                s
+                 .trim()
+                 .split("[...]")
+                 .map(str::to_owned)
+                 .collect()
+            }).collect();
+        // check if each line in props.check_lines appears in the
+        // output (in order)
+        let mut i = 0;
+        for line in debugger_run_result.stdout.lines() {
+            let mut rest = line.trim();
+            let mut first = true;
+            let mut failed = false;
+            for frag in &check_fragments[i] {
+                // The first fragment is anchored at the start of the line;
+                // later fragments may match anywhere in the remainder.
+                let found = if first {
+                    if rest.starts_with(frag) {
+                        Some(0)
+                    } else {
+                        None
+                    }
+                } else {
+                    rest.find(frag)
+                };
+                match found {
+                    None => {
+                        failed = true;
+                        break;
+                    }
+                    Some(i) => {
+                        rest = &rest[(i + frag.len())..];
+                    }
+                }
+                first = false;
+            }
+            // A check line only counts as matched when every fragment was
+            // found and the last fragment consumed the rest of the line.
+            if !failed && rest.is_empty() {
+                i += 1;
+            }
+            if i == num_check_lines {
+                // all lines checked
+                break;
+            }
+        }
+        if i != num_check_lines {
+            fatal_proc_rec(None, &format!("line not found in debugger output: {}",
+                                          check_lines.get(i).unwrap()),
+                           debugger_run_result);
+        }
+    }
+}
+
+/// Checks that each of `props.error_patterns` occurs, in order, in
+/// `output_to_check`. Reports every pattern that was not found and
+/// aborts the test if any are missing.
+fn check_error_patterns(revision: Option<&str>,
+                        props: &TestProps,
+                        testpaths: &TestPaths,
+                        output_to_check: &str,
+                        proc_res: &ProcRes) {
+    if props.error_patterns.is_empty() {
+        fatal(revision,
+              &format!("no error pattern specified in {:?}",
+                       testpaths.file.display()));
+    }
+    let mut next_err_idx = 0;
+    let mut next_err_pat = &props.error_patterns[next_err_idx];
+    let mut done = false;
+    for line in output_to_check.lines() {
+        if line.contains(next_err_pat) {
+            debug!("found error pattern {}", next_err_pat);
+            next_err_idx += 1;
+            if next_err_idx == props.error_patterns.len() {
+                debug!("found all error patterns");
+                done = true;
+                break;
+            }
+            next_err_pat = &props.error_patterns[next_err_idx];
+        }
+    }
+    if done { return; }
+
+    // Anything from next_err_idx onward was never matched.
+    let missing_patterns = &props.error_patterns[next_err_idx..];
+    if missing_patterns.len() == 1 {
+        fatal_proc_rec(
+            revision,
+            &format!("error pattern '{}' not found!", missing_patterns[0]),
+            proc_res);
+    } else {
+        for pattern in missing_patterns {
+            error(revision, &format!("error pattern '{}' not found!", *pattern));
+        }
+        fatal_proc_rec(revision, "multiple error patterns not found", proc_res);
+    }
+}
+
+/// Aborts the test if the compiler's stderr contains an internal
+/// compiler error (ICE) marker line.
+fn check_no_compiler_crash(revision: Option<&str>, proc_res: &ProcRes) {
+    for line in proc_res.stderr.lines() {
+        if line.starts_with("error: internal compiler error:") {
+            fatal_proc_rec(revision,
+                           "compiler encountered internal error",
+                           proc_res);
+        }
+    }
+}
+
+/// Aborts the test if any of the test's `forbid_output` substrings
+/// appears in `output_to_check`.
+fn check_forbid_output(revision: Option<&str>,
+                       props: &TestProps,
+                       output_to_check: &str,
+                       proc_res: &ProcRes) {
+    for pat in &props.forbid_output {
+        if output_to_check.contains(pat) {
+            fatal_proc_rec(revision,
+                           "forbidden pattern found in compiler output",
+                           proc_res);
+        }
+    }
+}
+
+/// Matches the compiler's stderr against the `//~` expected errors of the
+/// test. Every expected error must be found on a line with the matching
+/// `file:line:` prefix (or a continuation line), and any diagnostic that
+/// looks like an error/warning but was not expected counts as unexpected.
+/// Aborts if anything is unexpected or missing.
+fn check_expected_errors(revision: Option<&str>,
+                         expected_errors: Vec<errors::ExpectedError>,
+                         testpaths: &TestPaths,
+                         proc_res: &ProcRes) {
+    // true if we found the error in question
+    let mut found_flags = vec![false; expected_errors.len()];
+
+    if proc_res.status.success() {
+        fatal_proc_rec(revision, "process did not return an error status", proc_res);
+    }
+
+    // Per expected error: the "file:line:" prefix its diagnostic must carry.
+    let prefixes = expected_errors.iter().map(|ee| {
+        let expected = format!("{}:{}:", testpaths.file.display(), ee.line_num);
+        // On windows just translate all '\' path separators to '/'
+        expected.replace(r"\", "/")
+    }).collect::<Vec<String>>();
+
+    // If the testcase being checked contains at least one expected "help"
+    // message, then we'll ensure that all "help" messages are expected.
+    // Otherwise, all "help" messages reported by the compiler will be ignored.
+    // This logic also applies to "note" messages.
+    let (expect_help, expect_note) =
+        expected_errors.iter()
+                       .fold((false, false),
+                             |(acc_help, acc_note), ee|
+                                 (acc_help || ee.kind == Some(ErrorKind::Help),
+                                  acc_note || ee.kind == Some(ErrorKind::Note)));
+
+    // Scan and extract our error/warning messages,
+    // which look like:
+    //    filename:line1:col1: line2:col2: *error:* msg
+    //    filename:line1:col1: line2:col2: *warning:* msg
+    // where line1:col1: is the starting point, line2:col2:
+    // is the ending point, and * represents ANSI color codes.
+    //
+    // This pattern is ambiguous on windows, because filename may contain
+    // a colon, so any path prefix must be detected and removed first.
+    let mut unexpected = 0;
+    let mut not_found = 0;
+    for line in proc_res.stderr.lines() {
+        let mut was_expected = false;
+        let mut prev = 0;
+        for (i, ee) in expected_errors.iter().enumerate() {
+            if !found_flags[i] {
+                debug!("prefix={} ee.kind={:?} ee.msg={} line={}",
+                       prefixes[i],
+                       ee.kind,
+                       ee.msg,
+                       line);
+                // Suggestions have no line number in their output, so take on the line number of
+                // the previous expected error
+                if ee.kind == Some(ErrorKind::Suggestion) {
+                    assert!(expected_errors[prev].kind == Some(ErrorKind::Help),
+                            "SUGGESTIONs must be preceded by a HELP");
+                    if line.contains(&ee.msg) {
+                        found_flags[i] = true;
+                        was_expected = true;
+                        break;
+                    }
+                }
+                if
+                    (prefix_matches(line, &prefixes[i]) || continuation(line)) &&
+                    (ee.kind.is_none() || line.contains(&ee.kind.as_ref().unwrap().to_string())) &&
+                    line.contains(&ee.msg)
+                {
+                    found_flags[i] = true;
+                    was_expected = true;
+                    break;
+                }
+            }
+            prev = i;
+        }
+
+        // ignore this msg which gets printed at the end
+        if line.contains("aborting due to") {
+            was_expected = true;
+        }
+
+        if !was_expected && is_unexpected_compiler_message(line, expect_help, expect_note) {
+            error(revision, &format!("unexpected compiler message: '{}'", line));
+            unexpected += 1;
+        }
+    }
+
+    // Report every expected error that never matched any output line.
+    for (i, &flag) in found_flags.iter().enumerate() {
+        if !flag {
+            let ee = &expected_errors[i];
+            error(revision, &format!("expected {} on line {} not found: {}",
+                                     ee.kind.as_ref()
+                                            .map_or("message".into(),
+                                                    |k| k.to_string()),
+                                     ee.line_num, ee.msg));
+            not_found += 1;
+        }
+    }
+
+    if unexpected > 0 || not_found > 0 {
+        fatal_proc_rec(
+            revision,
+            &format!("{} unexpected errors found, {} expected errors not found",
+                     unexpected, not_found),
+            proc_res);
+    }
+
+    // Case-insensitive prefix match on windows (paths may differ in case);
+    // exact prefix match elsewhere.
+    fn prefix_matches(line: &str, prefix: &str) -> bool {
+        use std::ascii::AsciiExt;
+        // On windows just translate all '\' path separators to '/'
+        let line = line.replace(r"\", "/");
+        if cfg!(windows) {
+            line.to_ascii_lowercase().starts_with(&prefix.to_ascii_lowercase())
+        } else {
+            line.starts_with(prefix)
+        }
+    }
+
+    // A multi-line error will have followup lines which start with a space
+    // or open paren.
+    fn continuation( line: &str) -> bool {
+        line.starts_with(" ") || line.starts_with("(")
+    }
+}
+
+/// Returns true if `line` looks like a compiler diagnostic of the shape
+/// `filename:line:col: line:col: error|warning: ...` (help/note only when
+/// expected). A leading windows drive prefix (e.g. `C:`) is stripped
+/// first so its colon doesn't confuse the scan.
+fn is_unexpected_compiler_message(line: &str, expect_help: bool, expect_note: bool) -> bool {
+    let mut c = Path::new(line).components();
+    let line = match c.next() {
+        Some(Component::Prefix(_)) => c.as_path().to_str().unwrap(),
+        _ => line,
+    };
+
+    let mut i = 0;
+    return scan_until_char(line, ':', &mut i) &&
+        scan_char(line, ':', &mut i) &&
+        scan_integer(line, &mut i) &&
+        scan_char(line, ':', &mut i) &&
+        scan_integer(line, &mut i) &&
+        scan_char(line, ':', &mut i) &&
+        scan_char(line, ' ', &mut i) &&
+        scan_integer(line, &mut i) &&
+        scan_char(line, ':', &mut i) &&
+        scan_integer(line, &mut i) &&
+        scan_char(line, ' ', &mut i) &&
+        (scan_string(line, "error", &mut i) ||
+         scan_string(line, "warning", &mut i) ||
+         (expect_help && scan_string(line, "help", &mut i)) ||
+         (expect_note && scan_string(line, "note", &mut i))
+        );
+}
+
+/// Advances `*idx` to the next occurrence of `needle`; false if absent.
+/// NOTE(review): `find` returns an offset relative to `haystack[*idx..]`
+/// but it is stored as an absolute index. This is only correct when
+/// `*idx == 0` at the call, which holds for the sole caller
+/// (`is_unexpected_compiler_message` calls this first) — confirm before
+/// reusing elsewhere.
+fn scan_until_char(haystack: &str, needle: char, idx: &mut usize) -> bool {
+    if *idx >= haystack.len() {
+        return false;
+    }
+    let opt = haystack[(*idx)..].find(needle);
+    if opt.is_none() {
+        return false;
+    }
+    *idx = opt.unwrap();
+    return true;
+}
+
+/// Consumes exactly `needle` at `*idx`, advancing past it; false if the
+/// next char differs or the index is out of range.
+fn scan_char(haystack: &str, needle: char, idx: &mut usize) -> bool {
+    if *idx >= haystack.len() {
+        return false;
+    }
+    let ch = haystack[*idx..].chars().next().unwrap();
+    if ch != needle {
+        return false;
+    }
+    *idx += ch.len_utf8();
+    return true;
+}
+
+/// Consumes a non-empty run of ASCII digits starting at `*idx`,
+/// advancing past it; false if no digit is present there.
+fn scan_integer(haystack: &str, idx: &mut usize) -> bool {
+    let mut i = *idx;
+    while i < haystack.len() {
+        let ch = haystack[i..].chars().next().unwrap();
+        if ch < '0' || '9' < ch {
+            break;
+        }
+        i += ch.len_utf8();
+    }
+    // No digits consumed means no match.
+    if i == *idx {
+        return false;
+    }
+    *idx = i;
+    return true;
+}
+
+/// Consumes the literal `needle` starting at `*idx`, advancing past it;
+/// false (leaving `*idx` unchanged) on mismatch or truncation.
+fn scan_string(haystack: &str, needle: &str, idx: &mut usize) -> bool {
+    let mut haystack_i = *idx;
+    let mut needle_i = 0;
+    while needle_i < needle.len() {
+        if haystack_i >= haystack.len() {
+            return false;
+        }
+        let ch = haystack[haystack_i..].chars().next().unwrap();
+        haystack_i += ch.len_utf8();
+        // Reuses scan_char to step through the needle in lockstep.
+        if !scan_char(needle, ch, &mut needle_i) {
+            return false;
+        }
+    }
+    *idx = haystack_i;
+    return true;
+}
+
+/// A program to execute plus its argument list.
+struct ProcArgs {
+    prog: String,
+    args: Vec<String>,
+}
+
+/// The captured result of running a process: exit status, both output
+/// streams, and the command line used (for diagnostics).
+struct ProcRes {
+    status: Status,
+    stdout: String,
+    stderr: String,
+    cmdline: String,
+}
+
+/// Exit status of a test process: either an integer parsed out-of-band
+/// (e.g. read back from an Android device) or a real `ExitStatus`.
+enum Status {
+    Parsed(i32),
+    Normal(ExitStatus),
+}
+
+impl Status {
+    /// Exit code, if one is available (mirrors `ExitStatus::code`).
+    fn code(&self) -> Option<i32> {
+        match *self {
+            Status::Parsed(i) => Some(i),
+            Status::Normal(ref e) => e.code(),
+        }
+    }
+
+    /// True when the process exited successfully (code 0).
+    fn success(&self) -> bool {
+        match *self {
+            Status::Parsed(i) => i == 0,
+            Status::Normal(ref e) => e.success(),
+        }
+    }
+}
+
+// Human-readable status for test failure reports.
+impl fmt::Display for Status {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        match *self {
+            Status::Parsed(i) => write!(f, "exit code: {}", i),
+            Status::Normal(ref e) => e.fmt(f),
+        }
+    }
+}
+
+/// Compiles the test file into an executable, linking against the
+/// test's auxiliary-output directory via `-L`.
+fn compile_test(config: &Config, props: &TestProps,
+                testpaths: &TestPaths) -> ProcRes {
+    let aux_dir = aux_output_dir_name(config, testpaths);
+    // FIXME (#9639): This needs to handle non-utf8 paths
+    let link_args = vec!("-L".to_owned(),
+                         aux_dir.to_str().unwrap().to_owned());
+    let args = make_compile_args(config,
+                                 props,
+                                 link_args,
+                                 |a, b| TargetLocation::ThisFile(make_exe_name(a, b)), testpaths);
+    compose_and_run_compiler(config, props, testpaths, args, None)
+}
+
+/// Runs rustdoc on the test file, writing output to `out_dir`. When
+/// `build_aux_docs` is set, recursively documents each aux-build first
+/// and returns the first failing result.
+fn document(config: &Config,
+            props: &TestProps,
+            testpaths: &TestPaths,
+            out_dir: &Path)
+            -> ProcRes {
+    if props.build_aux_docs {
+        for rel_ab in &props.aux_builds {
+            let aux_testpaths = compute_aux_test_paths(config, testpaths, rel_ab);
+            let aux_props = header::load_props(&aux_testpaths.file);
+            let auxres = document(config, &aux_props, &aux_testpaths, out_dir);
+            if !auxres.status.success() {
+                return auxres;
+            }
+        }
+    }
+
+    let aux_dir = aux_output_dir_name(config, testpaths);
+    let mut args = vec!["-L".to_owned(),
+                        aux_dir.to_str().unwrap().to_owned(),
+                        "-o".to_owned(),
+                        out_dir.to_str().unwrap().to_owned(),
+                        testpaths.file.to_str().unwrap().to_owned()];
+    args.extend(props.compile_flags.iter().cloned());
+    let args = ProcArgs {
+        prog: config.rustdoc_path.to_str().unwrap().to_owned(),
+        args: args,
+    };
+    compose_and_run_compiler(config, props, testpaths, args, None)
+}
+
+/// Runs the compiled test binary with the test's `exec-env` environment.
+/// Android targets are dispatched through adb; everything else runs
+/// locally with the aux dir on the library path.
+fn exec_compiled_test(config: &Config, props: &TestProps,
+                      testpaths: &TestPaths) -> ProcRes {
+
+    let env = props.exec_env.clone();
+
+    match &*config.target {
+
+        "arm-linux-androideabi" | "aarch64-linux-android" => {
+            _arm_exec_compiled_test(config, props, testpaths, env)
+        }
+
+        _=> {
+            let aux_dir = aux_output_dir_name(config, testpaths);
+            compose_and_run(config,
+                            testpaths,
+                            make_run_args(config, props, testpaths),
+                            env,
+                            config.run_lib_path.to_str().unwrap(),
+                            Some(aux_dir.to_str().unwrap()),
+                            None)
+        }
+    }
+}
+
+/// Builds the `TestPaths` for an aux-build file referenced (relative to
+/// the aux base dir) by the main test; keeps the main test's base dir.
+fn compute_aux_test_paths(config: &Config,
+                          testpaths: &TestPaths,
+                          rel_ab: &str)
+                          -> TestPaths
+{
+    let abs_ab = config.aux_base.join(rel_ab);
+    TestPaths {
+        file: abs_ab,
+        base: testpaths.base.clone(),
+        relative_dir: Path::new(rel_ab).parent()
+                                       .map(|p| p.to_path_buf())
+                                       .unwrap_or_else(|| PathBuf::new())
+    }
+}
+
+/// Builds every aux-build dependency of the test (aborting if one fails),
+/// pushing the resulting shared libraries to the device on Android, then
+/// runs the main compile described by `args`.
+fn compose_and_run_compiler(config: &Config, props: &TestProps,
+                            testpaths: &TestPaths, args: ProcArgs,
+                            input: Option<String>) -> ProcRes {
+    if !props.aux_builds.is_empty() {
+        ensure_dir(&aux_output_dir_name(config, testpaths));
+    }
+
+    let aux_dir = aux_output_dir_name(config, testpaths);
+    // FIXME (#9639): This needs to handle non-utf8 paths
+    let extra_link_args = vec!["-L".to_owned(),
+                               aux_dir.to_str().unwrap().to_owned()];
+
+    for rel_ab in &props.aux_builds {
+        let aux_testpaths = compute_aux_test_paths(config, testpaths, rel_ab);
+        let aux_props = header::load_props(&aux_testpaths.file);
+        let mut crate_type = if aux_props.no_prefer_dynamic {
+            Vec::new()
+        } else {
+            // We primarily compile all auxiliary libraries as dynamic libraries
+            // to avoid code size bloat and large binaries as much as possible
+            // for the test suite (otherwise including libstd statically in all
+            // executables takes up quite a bit of space).
+            //
+            // For targets like MUSL or Emscripten, however, there is no support for
+            // dynamic libraries so we just go back to building a normal library. Note,
+            // however, that for MUSL if the library is built with `force_host` then
+            // it's ok to be a dylib as the host should always support dylibs.
+            if (config.target.contains("musl") && !aux_props.force_host) ||
+                config.target.contains("emscripten")
+            {
+                vec!("--crate-type=lib".to_owned())
+            } else {
+                vec!("--crate-type=dylib".to_owned())
+            }
+        };
+        crate_type.extend(extra_link_args.clone());
+        let aux_args =
+            make_compile_args(config,
+                              &aux_props,
+                              crate_type,
+                              |a,b| {
+                                  let f = make_lib_name(a, &b.file, testpaths);
+                                  let parent = f.parent().unwrap();
+                                  TargetLocation::ThisDirectory(parent.to_path_buf())
+                              },
+                              &aux_testpaths);
+        let auxres = compose_and_run(config,
+                                     &aux_testpaths,
+                                     aux_args,
+                                     Vec::new(),
+                                     config.compile_lib_path.to_str().unwrap(),
+                                     Some(aux_dir.to_str().unwrap()),
+                                     None);
+        if !auxres.status.success() {
+            fatal_proc_rec(
+                None,
+                &format!("auxiliary build of {:?} failed to compile: ",
+                         aux_testpaths.file.display()),
+                &auxres);
+        }
+
+        // Android runs the test remotely, so aux dylibs must be pushed too.
+        match &*config.target {
+            "arm-linux-androideabi" | "aarch64-linux-android" => {
+                _arm_push_aux_shared_library(config, testpaths);
+            }
+            _ => {}
+        }
+    }
+
+    compose_and_run(config,
+                    testpaths,
+                    args,
+                    props.rustc_env.clone(),
+                    config.compile_lib_path.to_str().unwrap(),
+                    Some(aux_dir.to_str().unwrap()),
+                    input)
+}
+
+/// Creates `path` (and any missing parents) if it does not already exist.
+fn ensure_dir(path: &Path) {
+    if path.is_dir() { return; }
+    fs::create_dir_all(path).unwrap();
+}
+
+/// Thin wrapper around `program_output` that destructures `ProcArgs`.
+fn compose_and_run(config: &Config,
+                   testpaths: &TestPaths,
+                   ProcArgs{ args, prog }: ProcArgs,
+                   procenv: Vec<(String, String)> ,
+                   lib_path: &str,
+                   aux_path: Option<&str>,
+                   input: Option<String>) -> ProcRes {
+    return program_output(config, testpaths, lib_path,
+                          prog, aux_path, args, procenv, input);
+}
+
+/// Where compile output goes: a specific file (`-o`) or a directory
+/// (`--out-dir`). Consumed by `make_compile_args`.
+enum TargetLocation {
+    ThisFile(PathBuf),
+    ThisDirectory(PathBuf),
+}
+
+/// Assembles the rustc invocation for a test file: target selection
+/// (host when `force_host`), `-L build_base`, caller-provided extras,
+/// `-C prefer-dynamic` unless disabled, output location from `xform`,
+/// then host/target rustcflags and the test's own compile-flags.
+fn make_compile_args<F>(config: &Config,
+                        props: &TestProps,
+                        extras: Vec<String> ,
+                        xform: F,
+                        testpaths: &TestPaths)
+                        -> ProcArgs where
+    F: FnOnce(&Config, &TestPaths) -> TargetLocation,
+{
+    let xform_file = xform(config, testpaths);
+    let target = if props.force_host {
+        &*config.host
+    } else {
+        &*config.target
+    };
+    // FIXME (#9639): This needs to handle non-utf8 paths
+    let mut args = vec!(testpaths.file.to_str().unwrap().to_owned(),
+                        "-L".to_owned(),
+                        config.build_base.to_str().unwrap().to_owned(),
+                        format!("--target={}", target));
+    args.extend_from_slice(&extras);
+    if !props.no_prefer_dynamic {
+        args.push("-C".to_owned());
+        args.push("prefer-dynamic".to_owned());
+    }
+    let path = match xform_file {
+        TargetLocation::ThisFile(path) => {
+            args.push("-o".to_owned());
+            path
+        }
+        TargetLocation::ThisDirectory(path) => {
+            args.push("--out-dir".to_owned());
+            path
+        }
+    };
+    args.push(path.to_str().unwrap().to_owned());
+    if props.force_host {
+        args.extend(split_maybe_args(&config.host_rustcflags));
+    } else {
+        args.extend(split_maybe_args(&config.target_rustcflags));
+    }
+    args.extend(props.compile_flags.iter().cloned());
+    return ProcArgs {
+        prog: config.rustc_path.to_str().unwrap().to_owned(),
+        args: args,
+    };
+}
+
+/// Path for an aux library's output inside the test's aux dir.
+fn make_lib_name(config: &Config, auxfile: &Path, testpaths: &TestPaths) -> PathBuf {
+    // what we return here is not particularly important, as it
+    // happens; rustc ignores everything except for the directory.
+    let auxname = output_testname(auxfile);
+    aux_output_dir_name(config, testpaths).join(&auxname)
+}
+
+/// Path of the compiled test executable: the output base name plus the
+/// platform executable suffix (or `.js` for the asmjs/Emscripten target).
+fn make_exe_name(config: &Config, testpaths: &TestPaths) -> PathBuf {
+    let mut f = output_base_name(config, testpaths);
+    // FIXME: This is using the host architecture exe suffix, not target!
+    if config.target == "asmjs-unknown-emscripten" {
+        let mut fname = f.file_name().unwrap().to_os_string();
+        fname.push(".js");
+        f.set_file_name(&fname);
+    } else if !env::consts::EXE_SUFFIX.is_empty() {
+        let mut fname = f.file_name().unwrap().to_os_string();
+        fname.push(env::consts::EXE_SUFFIX);
+        f.set_file_name(&fname);
+    }
+    f
+}
+
+/// Builds the command to execute the compiled test: optional runtool
+/// (e.g. valgrind) first, then nodejs for Emscripten, then the
+/// executable, then the test's `run-flags`. The first token becomes the
+/// program; the rest are its arguments.
+fn make_run_args(config: &Config, props: &TestProps, testpaths: &TestPaths)
+                 -> ProcArgs {
+    // If we've got another tool to run under (valgrind),
+    // then split apart its command
+    let mut args = split_maybe_args(&config.runtool);
+
+    // If this is emscripten, then run tests under nodejs
+    if config.target == "asmjs-unknown-emscripten" {
+        args.push("nodejs".to_owned());
+    }
+
+    let exe_file = make_exe_name(config, testpaths);
+
+    // FIXME (#9639): This needs to handle non-utf8 paths
+    args.push(exe_file.to_str().unwrap().to_owned());
+
+    // Add the arguments in the run_flags directive
+    args.extend(split_maybe_args(&props.run_flags));
+
+    let prog = args.remove(0);
+    return ProcArgs {
+        prog: prog,
+        args: args,
+    };
+}
+
+/// Splits an optional flag string on spaces, dropping empty/whitespace
+/// tokens. `None` yields an empty vector.
+fn split_maybe_args(argstr: &Option<String>) -> Vec<String> {
+    match *argstr {
+        Some(ref s) => {
+            s
+             .split(' ')
+             .filter_map(|s| {
+                 if s.chars().all(|c| c.is_whitespace()) {
+                     None
+                 } else {
+                     Some(s.to_owned())
+                 }
+             }).collect()
+        }
+        None => Vec::new()
+    }
+}
+
+/// Executes `prog` via `procsrv::run`, logging the command line, dumping
+/// captured stdout/stderr to the test's output files, and returning a
+/// `ProcRes`. Panics if the process cannot be spawned.
+fn program_output(config: &Config, testpaths: &TestPaths, lib_path: &str, prog: String,
+                  aux_path: Option<&str>, args: Vec<String>,
+                  env: Vec<(String, String)>,
+                  input: Option<String>) -> ProcRes {
+    let cmdline =
+        {
+            let cmdline = make_cmdline(lib_path,
+                                       &prog,
+                                       &args);
+            logv(config, format!("executing {}", cmdline));
+            cmdline
+        };
+    let procsrv::Result {
+        out,
+        err,
+        status
+    } = procsrv::run(lib_path,
+                     &prog,
+                     aux_path,
+                     &args,
+                     env,
+                     input).expect(&format!("failed to exec `{}`", prog));
+    dump_output(config, testpaths, &out, &err);
+    return ProcRes {
+        status: Status::Normal(status),
+        stdout: out,
+        stderr: err,
+        cmdline: cmdline,
+    };
+}
+
+/// Renders the command line for logging; on non-unix platforms it
+/// prefixes the library-path environment assignment for diagnostics.
+fn make_cmdline(libpath: &str, prog: &str, args: &[String]) -> String {
+    use util;
+
+    // Linux and mac don't require adjusting the library search path
+    if cfg!(unix) {
+        format!("{} {}", prog, args.join(" "))
+    } else {
+        // Build the LD_LIBRARY_PATH variable as it would be seen on the command line
+        // for diagnostic purposes
+        fn lib_path_cmd_prefix(path: &str) -> String {
+            format!("{}=\"{}\"", util::lib_path_env_var(), util::make_new_path(path))
+        }
+
+        format!("{} {} {}", lib_path_cmd_prefix(libpath), prog, args.join(" "))
+    }
+}
+
+/// Writes stdout/stderr to the test's `.out`/`.err` files and, in
+/// verbose mode, echoes them to the console.
+fn dump_output(config: &Config, testpaths: &TestPaths, out: &str, err: &str) {
+    dump_output_file(config, testpaths, out, "out");
+    dump_output_file(config, testpaths, err, "err");
+    maybe_dump_to_stdout(config, out, err);
+}
+
+/// Writes `out` to the test's output file with the given extension,
+/// overwriting any previous contents.
+fn dump_output_file(config: &Config,
+                    testpaths: &TestPaths,
+                    out: &str,
+                    extension: &str) {
+    let outfile = make_out_name(config, testpaths, extension);
+    File::create(&outfile).unwrap().write_all(out.as_bytes()).unwrap();
+}
+
+/// Output-file path for this test with the given extension.
+fn make_out_name(config: &Config, testpaths: &TestPaths, extension: &str) -> PathBuf {
+    output_base_name(config, testpaths).with_extension(extension)
+}
+
+/// Directory holding the test's aux-build outputs:
+/// the output base name suffixed with `.<mode>.libaux`.
+fn aux_output_dir_name(config: &Config, testpaths: &TestPaths) -> PathBuf {
+    let f = output_base_name(config, testpaths);
+    let mut fname = f.file_name().unwrap().to_os_string();
+    fname.push(&format!(".{}.libaux", config.mode));
+    f.with_file_name(&fname)
+}
+
+/// The test file's stem (name without extension), used as output name.
+fn output_testname(filepath: &Path) -> PathBuf {
+    PathBuf::from(filepath.file_stem().unwrap())
+}
+
+/// Base path for all of this test's outputs:
+/// `build_base/<relative_dir>/<testname>.<stage_id>`.
+fn output_base_name(config: &Config, testpaths: &TestPaths) -> PathBuf {
+    let dir = config.build_base.join(&testpaths.relative_dir);
+
+    // Note: The directory `dir` is created during `collect_tests_from_dir`
+    dir
+        .join(&output_testname(&testpaths.file))
+        .with_extension(&config.stage_id)
+}
+
+/// In verbose mode, prints the captured stdout/stderr to the console.
+fn maybe_dump_to_stdout(config: &Config, out: &str, err: &str) {
+    if config.verbose {
+        println!("------{}------------------------------", "stdout");
+        println!("{}", out);
+        println!("------{}------------------------------", "stderr");
+        println!("{}", err);
+        println!("------------------------------------------");
+    }
+}
+
+/// Prints a test error, tagged with the revision when one is active.
+fn error(revision: Option<&str>, err: &str) {
+    match revision {
+        Some(rev) => println!("\nerror in revision `{}`: {}", rev, err),
+        None => println!("\nerror: {}", err)
+    }
+}
+
+/// Prints a test error and panics, failing the test.
+fn fatal(revision: Option<&str>, err: &str) -> ! {
+    error(revision, err); panic!();
+}
+
+/// Prints a test error plus the full process record (status, command
+/// line, stdout, stderr), then panics to fail the test.
+fn fatal_proc_rec(revision: Option<&str>, err: &str, proc_res: &ProcRes) -> ! {
+    error(revision, err);
+    print!("\
+status: {}\n\
+command: {}\n\
+stdout:\n\
+------------------------------------------\n\
+{}\n\
+------------------------------------------\n\
+stderr:\n\
+------------------------------------------\n\
+{}\n\
+------------------------------------------\n\
+\n",
+           proc_res.status, proc_res.cmdline, proc_res.stdout,
+           proc_res.stderr);
+    panic!();
+}
+
+/// Runs the compiled test on an Android device via adb: pushes the
+/// binary, runs it through `adb_run_wrapper.sh` with the given env, then
+/// reads back `<prog>.exitcode`, `<prog>.stdout`, and `<prog>.stderr`
+/// from the device to reconstruct a `ProcRes`.
+fn _arm_exec_compiled_test(config: &Config,
+                           props: &TestProps,
+                           testpaths: &TestPaths,
+                           env: Vec<(String, String)>)
+                           -> ProcRes {
+    let args = make_run_args(config, props, testpaths);
+    let cmdline = make_cmdline("",
+                               &args.prog,
+                               &args.args);
+
+    // get bare program string
+    let mut tvec: Vec<String> = args.prog
+                                    .split('/')
+                                    .map(str::to_owned)
+                                    .collect();
+    let prog_short = tvec.pop().unwrap();
+
+    // copy to target
+    let copy_result = procsrv::run("",
+                                   &config.adb_path,
+                                   None,
+                                   &[
+                                       "push".to_owned(),
+                                       args.prog.clone(),
+                                       config.adb_test_dir.clone()
+                                   ],
+                                   vec!(("".to_owned(), "".to_owned())),
+                                   Some("".to_owned()))
+        .expect(&format!("failed to exec `{}`", config.adb_path));
+
+    if config.verbose {
+        println!("push ({}) {} {} {}",
+                 config.target,
+                 args.prog,
+                 copy_result.out,
+                 copy_result.err);
+    }
+
+    logv(config, format!("executing ({}) {}", config.target, cmdline));
+
+    let mut runargs = Vec::new();
+
+    // run test via adb_run_wrapper
+    runargs.push("shell".to_owned());
+    for (key, val) in env {
+        runargs.push(format!("{}={}", key, val));
+    }
+    runargs.push(format!("{}/../adb_run_wrapper.sh", config.adb_test_dir));
+    runargs.push(format!("{}", config.adb_test_dir));
+    runargs.push(format!("{}", prog_short));
+
+    for tv in &args.args {
+        runargs.push(tv.to_owned());
+    }
+    procsrv::run("",
+                 &config.adb_path,
+                 None,
+                 &runargs,
+                 vec!(("".to_owned(), "".to_owned())), Some("".to_owned()))
+        .expect(&format!("failed to exec `{}`", config.adb_path));
+
+    // get exitcode of result
+    runargs = Vec::new();
+    runargs.push("shell".to_owned());
+    runargs.push("cat".to_owned());
+    runargs.push(format!("{}/{}.exitcode", config.adb_test_dir, prog_short));
+
+    let procsrv::Result{ out: exitcode_out, err: _, status: _ } =
+        procsrv::run("",
+                     &config.adb_path,
+                     None,
+                     &runargs,
+                     vec!(("".to_owned(), "".to_owned())),
+                     Some("".to_owned()))
+        .expect(&format!("failed to exec `{}`", config.adb_path));
+
+    // Parse the leading decimal digits of the exitcode file.
+    let mut exitcode: i32 = 0;
+    for c in exitcode_out.chars() {
+        if !c.is_numeric() { break; }
+        exitcode = exitcode * 10 + match c {
+            '0' ... '9' => c as i32 - ('0' as i32),
+            _ => 101,
+        }
+    }
+
+    // get stdout of result
+    runargs = Vec::new();
+    runargs.push("shell".to_owned());
+    runargs.push("cat".to_owned());
+    runargs.push(format!("{}/{}.stdout", config.adb_test_dir, prog_short));
+
+    let procsrv::Result{ out: stdout_out, err: _, status: _ } =
+        procsrv::run("",
+                     &config.adb_path,
+                     None,
+                     &runargs,
+                     vec!(("".to_owned(), "".to_owned())),
+                     Some("".to_owned()))
+        .expect(&format!("failed to exec `{}`", config.adb_path));
+
+    // get stderr of result
+    runargs = Vec::new();
+    runargs.push("shell".to_owned());
+    runargs.push("cat".to_owned());
+    runargs.push(format!("{}/{}.stderr", config.adb_test_dir, prog_short));
+
+    let procsrv::Result{ out: stderr_out, err: _, status: _ } =
+        procsrv::run("",
+                     &config.adb_path,
+                     None,
+                     &runargs,
+                     vec!(("".to_owned(), "".to_owned())),
+                     Some("".to_owned()))
+        .expect(&format!("failed to exec `{}`", config.adb_path));
+
+    dump_output(config,
+                testpaths,
+                &stdout_out,
+                &stderr_out);
+
+    ProcRes {
+        status: Status::Parsed(exitcode),
+        stdout: stdout_out,
+        stderr: stderr_out,
+        cmdline: cmdline
+    }
+}
+
+// Pushes every auxiliary shared library (`*.so`) built for this test from
+// the local aux output directory to the remote ARM/Android device via
+// `adb push`, so the test binary can load them at runtime on the device.
+fn _arm_push_aux_shared_library(config: &Config, testpaths: &TestPaths) {
+    let tdir = aux_output_dir_name(config, testpaths);
+
+    let dirs = fs::read_dir(&tdir).unwrap();
+    for file in dirs {
+        let file = file.unwrap().path();
+        // Only shared libraries get pushed; other build artifacts are skipped.
+        if file.extension().and_then(|s| s.to_str()) == Some("so") {
+            // FIXME (#9639): This needs to handle non-utf8 paths
+            let copy_result = procsrv::run("",
+                                           &config.adb_path,
+                                           None,
+                                           &[
+                                               "push".to_owned(),
+                                               file.to_str()
+                                                   .unwrap()
+                                                   .to_owned(),
+                                               config.adb_test_dir.to_owned(),
+                                           ],
+                                           vec!(("".to_owned(),
+                                                 "".to_owned())),
+                                           Some("".to_owned()))
+                .expect(&format!("failed to exec `{}`", config.adb_path));
+
+            // Echo adb's stdout/stderr so push failures are visible in logs.
+            if config.verbose {
+                println!("push ({}) {:?} {} {}",
+                         config.target, file.display(),
+                         copy_result.out, copy_result.err);
+            }
+        }
+    }
+}
+
+// codegen tests (using FileCheck)
+
+// Compiles the test file with `--emit=llvm-ir` so the generated IR can be
+// checked by LLVM's FileCheck afterwards. Returns the compiler invocation
+// result; the `.ll` file lands next to the usual output base name.
+fn compile_test_and_save_ir(config: &Config, props: &TestProps,
+                            testpaths: &TestPaths) -> ProcRes {
+    let aux_dir = aux_output_dir_name(config, testpaths);
+    // FIXME (#9639): This needs to handle non-utf8 paths
+    let mut flags = vec!["-L".to_owned(),
+                         aux_dir.to_str().unwrap().to_owned()];
+    flags.push("--emit=llvm-ir".to_owned());
+    let args = make_compile_args(config,
+                                 props,
+                                 flags,
+                                 |a, b| TargetLocation::ThisDirectory(
+                                     output_base_name(a, b).parent()
+                                                           .unwrap().to_path_buf()),
+                                 testpaths);
+    compose_and_run_compiler(config, props, testpaths, args, None)
+}
+
+// Runs LLVM's FileCheck over the emitted `.ll` file, using the test source
+// itself as the file holding the CHECK directives.
+fn check_ir_with_filecheck(config: &Config, testpaths: &TestPaths) -> ProcRes {
+    let irfile = output_base_name(config, testpaths).with_extension("ll");
+    let filecheck = config.llvm_filecheck.as_ref().unwrap();
+    // FIXME (#9639): This needs to handle non-utf8 paths
+    let input_arg = format!("-input-file={}", irfile.to_str().unwrap());
+    let pattern_arg = testpaths.file.to_str().unwrap().to_owned();
+    let proc_args = ProcArgs {
+        prog: filecheck.to_str().unwrap().to_owned(),
+        args: vec![input_arg, pattern_arg],
+    };
+    compose_and_run(config, testpaths, proc_args, Vec::new(), "", None, None)
+}
+
+// Drives a codegen test: compile the test to LLVM IR, then verify the IR
+// against the FileCheck directives embedded in the test source. Aborts the
+// test with a fatal error at the first failing stage.
+fn run_codegen_test(config: &Config, props: &TestProps, testpaths: &TestPaths) {
+    assert!(props.revisions.is_empty(), "revisions not relevant here");
+
+    // FileCheck is optional at configure time; codegen tests require it.
+    if config.llvm_filecheck.is_none() {
+        fatal(None, "missing --llvm-filecheck");
+    }
+
+    let compile_res = compile_test_and_save_ir(config, props, testpaths);
+    if !compile_res.status.success() {
+        fatal_proc_rec(None, "compilation failed!", &compile_res);
+    }
+
+    let check_res = check_ir_with_filecheck(config, testpaths);
+    if !check_res.status.success() {
+        fatal_proc_rec(None,
+                       "verification with 'FileCheck' failed",
+                       &check_res);
+    }
+}
+
+// Character set passed to GDB for debuginfo tests. The branches are
+// mutually exclusive (`target_os` has exactly one value per build), so
+// their order is irrelevant.
+fn charset() -> &'static str {
+    // FreeBSD 10.1 defaults to GDB 6.1.1 which doesn't support "auto" charset
+    if cfg!(target_os = "freebsd") {
+        "ISO-8859-1"
+    } else if cfg!(target_os = "bitrig") {
+        "auto"
+    } else {
+        "UTF-8"
+    }
+}
+
+// Runs a rustdoc test: generate documentation for the test file into a
+// fresh output directory, then validate the produced HTML with the
+// `src/etc/htmldocck.py` checker script.
+fn run_rustdoc_test(config: &Config, props: &TestProps, testpaths: &TestPaths) {
+    assert!(props.revisions.is_empty(), "revisions not relevant here");
+
+    let out_dir = output_base_name(config, testpaths);
+    // Start from a clean output directory; removal failure is ignored since
+    // the directory may simply not exist yet.
+    let _ = fs::remove_dir_all(&out_dir);
+    ensure_dir(&out_dir);
+
+    let proc_res = document(config, props, testpaths, &out_dir);
+    if !proc_res.status.success() {
+        fatal_proc_rec(None, "rustdoc failed!", &proc_res);
+    }
+    let root = find_rust_src_root(config).unwrap();
+
+    // htmldocck checks the generated HTML against @has/@matches directives
+    // in the test source.
+    let res = cmd2procres(config,
+                          testpaths,
+                          Command::new(&config.python)
+                          .arg(root.join("src/etc/htmldocck.py"))
+                          .arg(out_dir)
+                          .arg(&testpaths.file));
+    if !res.status.success() {
+        fatal_proc_rec(None, "htmldocck failed!", &res);
+    }
+}
+
+// Runs a codegen-units test: compile the test (the compiler prints one
+// "TRANS_ITEM ..." line per translation item), then compare the actual set
+// of items — and, when specified, their codegen-unit assignment — against
+// the expected items encoded as error annotations in the test source.
+// Prints a report and panics (failing the test) if any item is missing,
+// unexpected, or assigned to the wrong codegen unit(s).
+fn run_codegen_units_test(config: &Config, props: &TestProps, testpaths: &TestPaths) {
+
+    assert!(props.revisions.is_empty(), "revisions not relevant here");
+
+    let proc_res = compile_test(config, props, testpaths);
+
+    if !proc_res.status.success() {
+        fatal_proc_rec(None, "compilation failed!", &proc_res);
+    }
+
+    check_no_compiler_crash(None, &proc_res);
+
+    // Compiler output lines have the shape: "TRANS_ITEM <name> [@@ <cgu>+]".
+    const PREFIX: &'static str = "TRANS_ITEM ";
+    const CGU_MARKER: &'static str = "@@";
+
+    let actual: Vec<TransItem> = proc_res
+        .stdout
+        .lines()
+        .filter(|line| line.starts_with(PREFIX))
+        .map(str_to_trans_item)
+        .collect();
+
+    // Expected items are carried in the messages of the test file's error
+    // annotations, parsed with the same format as the compiler output.
+    let expected: Vec<TransItem> = errors::load_errors(&testpaths.file, None)
+        .iter()
+        .map(|e| str_to_trans_item(&e.msg[..]))
+        .collect();
+
+    let mut missing = Vec::new();
+    let mut wrong_cgus = Vec::new();
+
+    // Match expected items against actual ones by name; track items that
+    // are absent entirely, and items present but in the wrong CGU set.
+    for expected_item in &expected {
+        let actual_item_with_same_name = actual.iter()
+                                               .find(|ti| ti.name == expected_item.name);
+
+        if let Some(actual_item) = actual_item_with_same_name {
+            if !expected_item.codegen_units.is_empty() {
+                // Also check for codegen units
+                if expected_item.codegen_units != actual_item.codegen_units {
+                    wrong_cgus.push((expected_item.clone(), actual_item.clone()));
+                }
+            }
+        } else {
+            missing.push(expected_item.string.clone());
+        }
+    }
+
+    // Items the compiler produced that no annotation accounts for.
+    let unexpected: Vec<_> =
+        actual.iter()
+              .filter(|acgu| !expected.iter().any(|ecgu| acgu.name == ecgu.name))
+              .map(|acgu| acgu.string.clone())
+              .collect();
+
+    if !missing.is_empty() {
+        missing.sort();
+
+        println!("\nThese items should have been contained but were not:\n");
+
+        for item in &missing {
+            println!("{}", item);
+        }
+
+        println!("\n");
+    }
+
+    if !unexpected.is_empty() {
+        let sorted = {
+            let mut sorted = unexpected.clone();
+            sorted.sort();
+            sorted
+        };
+
+        println!("\nThese items were contained but should not have been:\n");
+
+        for item in sorted {
+            println!("{}", item);
+        }
+
+        println!("\n");
+    }
+
+    if !wrong_cgus.is_empty() {
+        // Sort for deterministic report ordering across runs.
+        wrong_cgus.sort_by_key(|pair| pair.0.name.clone());
+        println!("\nThe following items were assigned to wrong codegen units:\n");
+
+        for &(ref expected_item, ref actual_item) in &wrong_cgus {
+            println!("{}", expected_item.name);
+            println!("  expected: {}", codegen_units_to_str(&expected_item.codegen_units));
+            println!("  actual:   {}", codegen_units_to_str(&actual_item.codegen_units));
+            println!("");
+        }
+    }
+
+    if !(missing.is_empty() && unexpected.is_empty() && wrong_cgus.is_empty())
+    {
+        panic!();
+    }
+
+    // One translation item: its name, the set of codegen units it was
+    // assigned to (empty when the annotation doesn't constrain CGUs), and
+    // the normalized full "TRANS_ITEM ..." string used for reporting.
+    #[derive(Clone, Eq, PartialEq)]
+    struct TransItem {
+        name: String,
+        codegen_units: HashSet<String>,
+        string: String,
+    }
+
+    // [TRANS_ITEM] name [@@ (cgu)+]
+    // Parses one item line (from compiler output or an annotation message);
+    // tolerates a missing "TRANS_ITEM " prefix and surrounding whitespace.
+    fn str_to_trans_item(s: &str) -> TransItem {
+        let s = if s.starts_with(PREFIX) {
+            (&s[PREFIX.len()..]).trim()
+        } else {
+            s.trim()
+        };
+
+        let full_string = format!("{}{}", PREFIX, s.trim().to_owned());
+
+        let parts: Vec<&str> = s.split(CGU_MARKER)
+                                .map(str::trim)
+                                .filter(|s| !s.is_empty())
+                                .collect();
+
+        let name = parts[0].trim();
+
+        let cgus = if parts.len() > 1 {
+            let cgus_str = parts[1];
+
+            cgus_str.split(" ")
+                    .map(str::trim)
+                    .filter(|s| !s.is_empty())
+                    .map(str::to_owned)
+                    .collect()
+        }
+        else {
+            HashSet::new()
+        };
+
+        TransItem {
+            name: name.to_owned(),
+            codegen_units: cgus,
+            string: full_string,
+        }
+    }
+
+    // Renders a CGU set as a sorted, space-separated string so diagnostics
+    // are deterministic regardless of hash iteration order.
+    fn codegen_units_to_str(cgus: &HashSet<String>) -> String
+    {
+        let mut cgus: Vec<_> = cgus.iter().collect();
+        cgus.sort();
+
+        let mut string = String::new();
+        for cgu in cgus {
+            string.push_str(&cgu[..]);
+            string.push_str(" ");
+        }
+
+        string
+    }
+}
+
+// Drives an incremental-compilation test: compiles the same source once per
+// revision, reusing a single incremental work-product directory, and checks
+// each revision's expected outcome (run-pass / run-fail / compile-fail).
+fn run_incremental_test(config: &Config, props: &TestProps, testpaths: &TestPaths) {
+    // Basic plan for a test incremental/foo/bar.rs:
+    // - load list of revisions pass1, fail2, pass3
+    //   - each should begin with `rpass`, `rfail`, or `cfail`
+    //   - if `rpass`, expect compile and execution to succeed
+    //   - if `cfail`, expect compilation to fail
+    //   - if `rfail`, expect execution to fail
+    // - create a directory build/foo/bar.incremental
+    // - compile foo/bar.rs with -Z incremental=.../foo/bar.incremental and -C pass1
+    //   - because name of revision starts with "pass", expect success
+    // - compile foo/bar.rs with -Z incremental=.../foo/bar.incremental and -C fail2
+    //   - because name of revision starts with "fail", expect an error
+    //   - load expected errors as usual, but filter for those that end in `[fail2]`
+    // - compile foo/bar.rs with -Z incremental=.../foo/bar.incremental and -C pass3
+    //   - because name of revision starts with "pass", expect success
+    // - execute build/foo/bar.exe and save output
+    //
+    // FIXME -- use non-incremental mode as an oracle? That doesn't apply
+    // to #[rustc_dirty] and clean tests I guess
+
+    assert!(!props.revisions.is_empty(), "incremental tests require a list of revisions");
+
+    let output_base_name = output_base_name(config, testpaths);
+
+    // Create the incremental workproduct directory. Any stale state from a
+    // previous run is removed first so the test starts from scratch.
+    let incremental_dir = output_base_name.with_extension("incremental");
+    if incremental_dir.exists() {
+        fs::remove_dir_all(&incremental_dir).unwrap();
+    }
+    fs::create_dir_all(&incremental_dir).unwrap();
+
+    if config.verbose {
+        print!("incremental_dir={}", incremental_dir.display());
+    }
+
+    for revision in &props.revisions {
+        // Re-load header props for this revision so per-revision directives
+        // (e.g. `// [pass1] compile-flags: ...`) take effect.
+        let mut revision_props = props.clone();
+        header::load_props_into(&mut revision_props, &testpaths.file, Some(&revision));
+
+        revision_props.compile_flags.extend(vec![
+            format!("-Z"),
+            format!("incremental={}", incremental_dir.display()),
+            format!("--cfg"),
+            format!("{}", revision),
+        ]);
+
+        if config.verbose {
+            print!("revision={:?} revision_props={:#?}", revision, revision_props);
+        }
+
+        // The revision-name prefix selects which kind of expectation to check.
+        if revision.starts_with("rpass") {
+            run_rpass_test_revision(config, &revision_props, testpaths, Some(&revision));
+        } else if revision.starts_with("rfail") {
+            run_rfail_test_revision(config, &revision_props, testpaths, Some(&revision));
+        } else if revision.starts_with("cfail") {
+            run_cfail_test_revision(config, &revision_props, testpaths, Some(&revision));
+        } else {
+            fatal(
+                Some(revision),
+                "revision name must begin with rpass, rfail, or cfail");
+        }
+    }
+}
--- /dev/null
+// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use std::env;
+use common::Config;
+
+// NOTE(review): `get_os` matches these keys by substring containment
+// against the whole triple and returns the first hit, so entry order can
+// matter when one key is contained in another (e.g. "win32"/"windows").
+/// Conversion table from triple OS name to Rust SYSNAME
+const OS_TABLE: &'static [(&'static str, &'static str)] = &[
+    ("android", "android"),
+    ("bitrig", "bitrig"),
+    ("darwin", "macos"),
+    ("dragonfly", "dragonfly"),
+    ("freebsd", "freebsd"),
+    ("ios", "ios"),
+    ("linux", "linux"),
+    ("mingw32", "windows"),
+    ("netbsd", "netbsd"),
+    ("openbsd", "openbsd"),
+    ("win32", "windows"),
+    ("windows", "windows"),
+    ("solaris", "solaris"),
+    ("emscripten", "emscripten"),
+];
+
+// Conversion table from triple architecture name to Rust architecture
+// SYSNAME. Same first-substring-match convention as OS_TABLE (see
+// `get_arch`), so order can matter for overlapping keys ("powerpc" is
+// listed before "powerpc64" and both sides map consistently).
+const ARCH_TABLE: &'static [(&'static str, &'static str)] = &[
+    ("aarch64", "aarch64"),
+    ("amd64", "x86_64"),
+    ("arm", "arm"),
+    ("arm64", "aarch64"),
+    ("hexagon", "hexagon"),
+    ("i386", "x86"),
+    ("i686", "x86"),
+    ("mips", "mips"),
+    ("msp430", "msp430"),
+    ("powerpc", "powerpc"),
+    ("powerpc64", "powerpc64"),
+    ("s390x", "systemz"),
+    ("sparc", "sparc"),
+    ("x86_64", "x86_64"),
+    ("xcore", "xcore"),
+    ("asmjs", "asmjs"),
+];
+
+/// Maps a target triple to its Rust OS SYSNAME by substring lookup in
+/// `OS_TABLE`, returning the first matching entry.
+///
+/// # Panics
+///
+/// Panics if no table key occurs as a substring of `triple`.
+pub fn get_os(triple: &str) -> &'static str {
+    for &(triple_os, os) in OS_TABLE {
+        if triple.contains(triple_os) {
+            return os
+        }
+    }
+    // Include the offending triple so an unsupported/typo'd target is
+    // diagnosable from the panic message alone.
+    panic!("Cannot determine OS from triple `{}`", triple);
+}
+/// Maps a target triple to its Rust architecture SYSNAME by substring
+/// lookup in `ARCH_TABLE`, returning the first matching entry.
+///
+/// # Panics
+///
+/// Panics if no table key occurs as a substring of `triple`.
+pub fn get_arch(triple: &str) -> &'static str {
+    for &(triple_arch, arch) in ARCH_TABLE {
+        if triple.contains(triple_arch) {
+            return arch
+        }
+    }
+    // Include the offending triple so an unsupported/typo'd target is
+    // diagnosable from the panic message alone.
+    panic!("Cannot determine Architecture from triple `{}`", triple);
+}
+
+/// Returns the environment/ABI component — the fourth dash-separated
+/// field — of a target triple, if present (e.g. `Some("gnu")` for
+/// `x86_64-unknown-linux-gnu`, `None` for `x86_64-apple-darwin`).
+pub fn get_env(triple: &str) -> Option<&str> {
+    triple.split('-').skip(3).next()
+}
+
+/// Builds a new library search path value that prepends `path` to the
+/// current contents of the path environment variable. Windows-only: it
+/// asserts `cfg!(windows)` because only there is `PATH` the library
+/// search path.
+pub fn make_new_path(path: &str) -> String {
+    assert!(cfg!(windows));
+    // Windows just uses PATH as the library search path, so we have to
+    // maintain the current value while adding our own
+    if let Ok(curr) = env::var(lib_path_env_var()) {
+        format!("{}{}{}", path, path_div(), curr)
+    } else {
+        path.to_owned()
+    }
+}
+
+/// Name of the environment variable holding the dynamic-library search
+/// path on Windows.
+pub fn lib_path_env_var() -> &'static str {
+    "PATH"
+}
+/// Separator between entries of the Windows `PATH` variable.
+fn path_div() -> &'static str {
+    ";"
+}
+
+/// Emits `s` through the `debug!` log macro and, when verbose mode is
+/// enabled in the config, echoes it to stdout as well.
+pub fn logv(config: &Config, s: String) {
+    debug!("{}", s);
+    if config.verbose {
+        println!("{}", s);
+    }
+}