With this PR in-place constants are handled correctly with respect to debug location assignment.
The PR also adds an (unrelated) test case for debug locations in `extern "C"` functions.
Fixes #22432
trpl: doc/book/index.html
doc/book/index.html: $(RUSTBOOK_EXE) $(wildcard $(S)/src/doc/trpl/*.md) | doc/
+ @$(call E, rustbook: $@)
$(Q)rm -rf doc/book
$(Q)$(RUSTBOOK) build $(S)src/doc/trpl doc/book
style: doc/style/index.html
doc/style/index.html: $(RUSTBOOK_EXE) $(wildcard $(S)/src/doc/style/*.md) | doc/
+ @$(call E, rustbook: $@)
$(Q)rm -rf doc/style
$(Q)$(RUSTBOOK) build $(S)src/doc/style doc/style
# The tests select when to use debug configuration on their own;
# remove directive, if present, from CFG_RUSTC_FLAGS (issue #7898).
-CTEST_RUSTC_FLAGS := $$(subst --cfg ndebug,,$$(CFG_RUSTC_FLAGS))
+CTEST_RUSTC_FLAGS := $$(subst -C debug-assertions,,$$(CFG_RUSTC_FLAGS))
# The tests cannot be optimized while the rest of the compiler is optimized, so
# filter out the optimization (if any) from rustc and then figure out if we need
use std::fmt;
use std::str::FromStr;
+use std::path::PathBuf;
#[derive(Clone, Copy, PartialEq, Debug)]
pub enum Mode {
pub run_lib_path: String,
// The rustc executable
- pub rustc_path: Path,
+ pub rustc_path: PathBuf,
// The clang executable
- pub clang_path: Option<Path>,
+ pub clang_path: Option<PathBuf>,
// The llvm binaries path
- pub llvm_bin_path: Option<Path>,
+ pub llvm_bin_path: Option<PathBuf>,
// The valgrind path
pub valgrind_path: Option<String>,
pub force_valgrind: bool,
// The directory containing the tests to run
- pub src_base: Path,
+ pub src_base: PathBuf,
// The directory where programs should be built
- pub build_base: Path,
+ pub build_base: PathBuf,
// Directory for auxiliary libraries
- pub aux_base: Path,
+ pub aux_base: PathBuf,
// The name of the stage being built (stage1, etc)
pub stage_id: String,
pub filter: Option<String>,
// Write out a parseable log of tests that were run
- pub logfile: Option<Path>,
+ pub logfile: Option<PathBuf>,
// A command line to prefix program execution with,
// for running under valgrind
pub lldb_version: Option<String>,
// Path to the android tools
- pub android_cross_path: Path,
+ pub android_cross_path: PathBuf,
// Extra parameter to run adb on arm-linux-androideabi
pub adb_path: String,
#![feature(test)]
#![feature(unicode)]
#![feature(core)]
+#![feature(path)]
+#![feature(os)]
+#![feature(io)]
+#![feature(fs)]
+#![feature(net)]
#![deny(warnings)]
extern crate log;
use std::env;
+use std::fs;
use std::old_io;
-use std::old_io::fs;
+use std::path::{Path, PathBuf};
use std::thunk::Thunk;
use getopts::{optopt, optflag, reqopt};
use common::Config;
panic!()
}
- fn opt_path(m: &getopts::Matches, nm: &str) -> Path {
+ fn opt_path(m: &getopts::Matches, nm: &str) -> PathBuf {
match m.opt_str(nm) {
- Some(s) => Path::new(s),
+ Some(s) => PathBuf::new(&s),
None => panic!("no option (=path) found for {}", nm),
}
}
compile_lib_path: matches.opt_str("compile-lib-path").unwrap(),
run_lib_path: matches.opt_str("run-lib-path").unwrap(),
rustc_path: opt_path(matches, "rustc-path"),
- clang_path: matches.opt_str("clang-path").map(|s| Path::new(s)),
+ clang_path: matches.opt_str("clang-path").map(|s| PathBuf::new(&s)),
valgrind_path: matches.opt_str("valgrind-path"),
force_valgrind: matches.opt_present("force-valgrind"),
- llvm_bin_path: matches.opt_str("llvm-bin-path").map(|s| Path::new(s)),
+ llvm_bin_path: matches.opt_str("llvm-bin-path").map(|s| PathBuf::new(&s)),
src_base: opt_path(matches, "src-base"),
build_base: opt_path(matches, "build-base"),
aux_base: opt_path(matches, "aux-base"),
mode: matches.opt_str("mode").unwrap().parse().ok().expect("invalid mode"),
run_ignored: matches.opt_present("ignored"),
filter: filter,
- logfile: matches.opt_str("logfile").map(|s| Path::new(s)),
+ logfile: matches.opt_str("logfile").map(|s| PathBuf::new(&s)),
runtool: matches.opt_str("runtool"),
host_rustcflags: matches.opt_str("host-rustcflags"),
target_rustcflags: matches.opt_str("target-rustcflags"),
debug!("making tests from {:?}",
config.src_base.display());
let mut tests = Vec::new();
- let dirs = fs::readdir(&config.src_base).unwrap();
- for file in &dirs {
- let file = file.clone();
+ let dirs = fs::read_dir(&config.src_base).unwrap();
+ for file in dirs {
+ let file = file.unwrap().path();
debug!("inspecting file {:?}", file.display());
if is_test(config, &file) {
let t = make_test(config, &file, || {
_ => vec!(".rc".to_string(), ".rs".to_string())
};
let invalid_prefixes = vec!(".".to_string(), "#".to_string(), "~".to_string());
- let name = testfile.filename_str().unwrap();
+ let name = testfile.file_name().unwrap().to_str().unwrap();
let mut valid = false;
// Try to elide redundant long paths
fn shorten(path: &Path) -> String {
- let filename = path.filename_str();
- let p = path.dir_path();
- let dir = p.filename_str();
+ let filename = path.file_name().unwrap().to_str();
+ let p = path.parent().unwrap();
+ let dir = p.file_name().unwrap().to_str();
format!("{}/{}", dir.unwrap_or(""), filename.unwrap_or(""))
}
pub fn make_test_closure(config: &Config, testfile: &Path) -> test::TestFn {
let config = (*config).clone();
- // FIXME (#9639): This needs to handle non-utf8 paths
- let testfile = testfile.as_str().unwrap().to_string();
+ let testfile = testfile.to_path_buf();
test::DynTestFn(Thunk::new(move || {
- runtest::run(config, testfile)
+ runtest::run(config, &testfile)
}))
}
pub fn make_metrics_test_closure(config: &Config, testfile: &Path) -> test::TestFn {
let config = (*config).clone();
- // FIXME (#9639): This needs to handle non-utf8 paths
- let testfile = testfile.as_str().unwrap().to_string();
+ let testfile = testfile.to_path_buf();
test::DynMetricFn(box move |mm: &mut test::MetricMap| {
- runtest::run_metrics(config, testfile, mm)
+ runtest::run_metrics(config, &testfile, mm)
})
}
// except according to those terms.
use self::WhichLine::*;
-use std::old_io::{BufferedReader, File};
+use std::fs::File;
+use std::io::BufReader;
+use std::io::prelude::*;
+use std::path::Path;
pub struct ExpectedError {
pub line: uint,
/// //~| ERROR message two for that same line.
// Load any test directives embedded in the file
pub fn load_errors(testfile: &Path) -> Vec<ExpectedError> {
- let mut rdr = BufferedReader::new(File::open(testfile).unwrap());
+ let rdr = BufReader::new(File::open(testfile).unwrap());
// `last_nonfollow_error` tracks the most recently seen
// line with an error template that did not use the
// except according to those terms.
use std::env;
+use std::fs::File;
+use std::io::BufReader;
+use std::io::prelude::*;
+use std::path::{Path, PathBuf};
use common::Config;
use common;
pub run_flags: Option<String>,
// If present, the name of a file that this test should match when
// pretty-printed
- pub pp_exact: Option<Path>,
+ pub pp_exact: Option<PathBuf>,
// Modules from aux directory that should be compiled
pub aux_builds: Vec<String> ,
// Environment settings to use during execution
let mut pretty_mode = None;
let mut pretty_compare_only = false;
let mut forbid_output = Vec::new();
- iter_header(testfile, |ln| {
+ iter_header(testfile, &mut |ln| {
match parse_error_pattern(ln) {
Some(ep) => error_patterns.push(ep),
None => ()
}
}
- let val = iter_header(testfile, |ln| {
+ let val = iter_header(testfile, &mut |ln| {
!parse_name_directive(ln, "ignore-test") &&
!parse_name_directive(ln, &ignore_target(config)) &&
!parse_name_directive(ln, &ignore_stage(config)) &&
!val
}
-fn iter_header<F>(testfile: &Path, mut it: F) -> bool where
- F: FnMut(&str) -> bool,
-{
- use std::old_io::{BufferedReader, File};
-
- let mut rdr = BufferedReader::new(File::open(testfile).unwrap());
+fn iter_header(testfile: &Path, it: &mut FnMut(&str) -> bool) -> bool {
+ let rdr = BufReader::new(File::open(testfile).unwrap());
for ln in rdr.lines() {
// Assume that any directives will be found before the first
// module or function. This doesn't seem to be an optimization
})
}
-fn parse_pp_exact(line: &str, testfile: &Path) -> Option<Path> {
+fn parse_pp_exact(line: &str, testfile: &Path) -> Option<PathBuf> {
match parse_name_value_directive(line, "pp-exact") {
- Some(s) => Some(Path::new(s)),
+ Some(s) => Some(PathBuf::new(&s)),
None => {
if parse_name_directive(line, "pp-exact") {
- testfile.filename().map(|s| Path::new(s))
+ testfile.file_name().map(|s| PathBuf::new(s))
} else {
None
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use std::old_io::process::{ProcessExit, Command, Process, ProcessOutput};
+use std::process::{ExitStatus, Command, Child, Output, Stdio};
+use std::io::prelude::*;
use std::dynamic_lib::DynamicLibrary;
fn add_target_env(cmd: &mut Command, lib_path: &str, aux_path: Option<&str>) {
let var = DynamicLibrary::envvar();
let newpath = DynamicLibrary::create_path(&path);
let newpath = String::from_utf8(newpath).unwrap();
- cmd.env(var.to_string(), newpath);
+ cmd.env(var, &newpath);
}
-pub struct Result {pub status: ProcessExit, pub out: String, pub err: String}
+pub struct Result {pub status: ExitStatus, pub out: String, pub err: String}
pub fn run(lib_path: &str,
prog: &str,
input: Option<String>) -> Option<Result> {
let mut cmd = Command::new(prog);
- cmd.args(args);
+ cmd.args(args)
+ .stdin(Stdio::piped())
+ .stdout(Stdio::piped())
+ .stderr(Stdio::piped());
add_target_env(&mut cmd, lib_path, aux_path);
for (key, val) in env {
- cmd.env(key, val);
+ cmd.env(&key, &val);
}
match cmd.spawn() {
if let Some(input) = input {
process.stdin.as_mut().unwrap().write_all(input.as_bytes()).unwrap();
}
- let ProcessOutput { status, output, error } =
+ let Output { status, stdout, stderr } =
process.wait_with_output().unwrap();
Some(Result {
status: status,
- out: String::from_utf8(output).unwrap(),
- err: String::from_utf8(error).unwrap()
+ out: String::from_utf8(stdout).unwrap(),
+ err: String::from_utf8(stderr).unwrap()
})
},
Err(..) => None
aux_path: Option<&str>,
args: &[String],
env: Vec<(String, String)> ,
- input: Option<String>) -> Option<Process> {
+ input: Option<String>) -> Option<Child> {
let mut cmd = Command::new(prog);
- cmd.args(args);
+ cmd.args(args)
+ .stdin(Stdio::piped())
+ .stdout(Stdio::piped())
+ .stderr(Stdio::piped());
add_target_env(&mut cmd, lib_path, aux_path);
for (key, val) in env {
- cmd.env(key, val);
+ cmd.env(&key, &val);
}
match cmd.spawn() {
use self::TargetLocation::*;
use common::Config;
-use common::{CompileFail, ParseFail, Pretty, RunFail, RunPass, RunPassValgrind, DebugInfoGdb};
-use common::{Codegen, DebugInfoLldb};
+use common::{CompileFail, ParseFail, Pretty, RunFail, RunPass, RunPassValgrind};
+use common::{Codegen, DebugInfoLldb, DebugInfoGdb};
use errors;
use header::TestProps;
use header;
use procsrv;
use util::logv;
-#[cfg(target_os = "windows")]
-use util;
-
-#[cfg(target_os = "windows")]
-use std::ascii::AsciiExt;
-use std::old_io::File;
-use std::old_io::fs::PathExtensions;
-use std::old_io::fs;
-use std::old_io::net::tcp;
-use std::old_io::process::ProcessExit;
-use std::old_io::process;
-use std::old_io::timer;
-use std::old_io;
+
use std::env;
+use std::ffi::OsStr;
+use std::fmt;
+use std::fs::{self, File};
+use std::io::BufReader;
+use std::io::prelude::*;
use std::iter::repeat;
+use std::net::TcpStream;
+use std::old_io::timer;
+use std::path::{Path, PathBuf};
+use std::process::{Command, Output, ExitStatus};
use std::str;
-use std::string::String;
-use std::thread;
use std::time::Duration;
use test::MetricMap;
-pub fn run(config: Config, testfile: String) {
+pub fn run(config: Config, testfile: &Path) {
match &*config.target {
"arm-linux-androideabi" | "aarch64-linux-android" => {
run_metrics(config, testfile, &mut _mm);
}
-pub fn run_metrics(config: Config, testfile: String, mm: &mut MetricMap) {
+pub fn run_metrics(config: Config, testfile: &Path, mm: &mut MetricMap) {
if config.verbose {
// We're going to be dumping a lot of info. Start on a new line.
print!("\n\n");
}
- let testfile = Path::new(testfile);
debug!("running {:?}", testfile.display());
let props = header::load_props(&testfile);
debug!("loaded props");
};
// The value our Makefile configures valgrind to return on failure
- const VALGRIND_ERR: int = 100;
- if proc_res.status.matches_exit_status(VALGRIND_ERR) {
+ const VALGRIND_ERR: i32 = 100;
+ if proc_res.status.code() == Some(VALGRIND_ERR) {
fatal_proc_rec("run-fail test isn't valgrind-clean!", &proc_res);
}
fn check_correct_failure_status(proc_res: &ProcRes) {
// The value the rust runtime returns on failure
- const RUST_ERR: int = 101;
- if !proc_res.status.matches_exit_status(RUST_ERR) {
+ const RUST_ERR: i32 = 101;
+ if proc_res.status.code() != Some(RUST_ERR) {
fatal_proc_rec(
- &format!("failure produced the wrong error: {:?}",
+ &format!("failure produced the wrong error: {}",
proc_res.status),
proc_res);
}
let rounds =
match props.pp_exact { Some(_) => 1, None => 2 };
- let src = File::open(testfile).read_to_end().unwrap();
- let src = String::from_utf8(src.clone()).unwrap();
+ let mut src = String::new();
+ File::open(testfile).unwrap().read_to_string(&mut src).unwrap();
let mut srcs = vec!(src);
let mut round = 0;
let mut expected = match props.pp_exact {
Some(ref file) => {
- let filepath = testfile.dir_path().join(file);
- let s = File::open(&filepath).read_to_end().unwrap();
- String::from_utf8(s).unwrap()
+ let filepath = testfile.parent().unwrap().join(file);
+ let mut s = String::new();
+ File::open(&filepath).unwrap().read_to_string(&mut s).unwrap();
+ s
}
None => { srcs[srcs.len() - 2].clone() }
};
pretty_type.to_string()),
props.exec_env.clone(),
&config.compile_lib_path,
- Some(aux_dir.as_str().unwrap()),
+ Some(aux_dir.to_str().unwrap()),
Some(src))
}
pretty_type,
format!("--target={}", config.target),
"-L".to_string(),
- aux_dir.as_str().unwrap().to_string());
+ aux_dir.to_str().unwrap().to_string());
args.extend(split_maybe_args(&config.target_rustcflags).into_iter());
args.extend(split_maybe_args(&props.compile_flags).into_iter());
return ProcArgs {
- prog: config.rustc_path.as_str().unwrap().to_string(),
+ prog: config.rustc_path.to_str().unwrap().to_string(),
args: args,
};
}
"--crate-type=lib".to_string(),
format!("--target={}", target),
"-L".to_string(),
- config.build_base.as_str().unwrap().to_string(),
+ config.build_base.to_str().unwrap().to_string(),
"-L".to_string(),
- aux_dir.as_str().unwrap().to_string());
+ aux_dir.to_str().unwrap().to_string());
args.extend(split_maybe_args(&config.target_rustcflags).into_iter());
args.extend(split_maybe_args(&props.compile_flags).into_iter());
// FIXME (#9639): This needs to handle non-utf8 paths
return ProcArgs {
- prog: config.rustc_path.as_str().unwrap().to_string(),
+ prog: config.rustc_path.to_str().unwrap().to_string(),
args: args,
};
}
// write debugger script
let mut script_str = String::with_capacity(2048);
script_str.push_str("set charset UTF-8\n");
- script_str.push_str(&format!("file {}\n", exe_file.as_str().unwrap()));
+ script_str.push_str(&format!("file {}\n", exe_file.to_str().unwrap()));
script_str.push_str("target remote :5039\n");
script_str.push_str(&format!("set solib-search-path \
./{}/stage2/lib/rustlib/{}/lib/\n",
config.host, config.target));
for line in breakpoint_lines.iter() {
script_str.push_str(&format!("break {:?}:{}\n",
- testfile.filename_display(),
+ testfile.file_name().unwrap()
+ .to_string_lossy(),
*line)[..]);
}
script_str.push_str(&cmds);
- script_str.push_str("quit\n");
+ script_str.push_str("\nquit\n");
debug!("script_str = {}", script_str);
dump_output_file(config,
None,
&[
"push".to_string(),
- exe_file.as_str().unwrap().to_string(),
+ exe_file.to_str().unwrap().to_string(),
config.adb_test_dir.clone()
],
vec!(("".to_string(), "".to_string())),
if config.target.contains("aarch64")
{"64"} else {""},
config.adb_test_dir.clone(),
- str::from_utf8(
- exe_file.filename()
- .unwrap()).unwrap());
+ exe_file.file_name().unwrap().to_str()
+ .unwrap());
let mut process = procsrv::run_background("",
&config.adb_path
loop {
//waiting 1 second for gdbserver start
timer::sleep(Duration::milliseconds(1000));
- let result = thread::spawn(move || {
- tcp::TcpStream::connect("127.0.0.1:5039").unwrap();
- }).join();
- if result.is_err() {
- continue;
+ if TcpStream::connect("127.0.0.1:5039").is_ok() {
+ break
}
- break;
}
- let tool_path = match config.android_cross_path.as_str() {
+ let tool_path = match config.android_cross_path.to_str() {
Some(x) => x.to_string(),
None => fatal("cannot find android cross path")
};
vec!("-quiet".to_string(),
"-batch".to_string(),
"-nx".to_string(),
- format!("-command={}", debugger_script.as_str().unwrap()));
+ format!("-command={}", debugger_script.to_str().unwrap()));
let mut gdb_path = tool_path;
gdb_path.push_str(&format!("/bin/{}-gdb", config.target));
};
debugger_run_result = ProcRes {
- status: status,
+ status: Status::Normal(status),
stdout: out,
stderr: err,
cmdline: cmdline
};
- if process.signal_kill().is_err() {
+ if process.kill().is_err() {
println!("Adb process is already finished.");
}
}
.expect("Could not find Rust source root");
let rust_pp_module_rel_path = Path::new("./src/etc");
let rust_pp_module_abs_path = rust_src_root.join(rust_pp_module_rel_path)
- .as_str()
+ .to_str()
.unwrap()
.to_string();
// write debugger script
// GDB's script auto loading safe path
script_str.push_str(
&format!("add-auto-load-safe-path {}\n",
- rust_pp_module_abs_path.replace("\\", "\\\\"))
+ rust_pp_module_abs_path.replace(r"\", r"\\"))
);
}
}
script_str.push_str("set print pretty off\n");
// Add the pretty printer directory to GDB's source-file search path
- script_str.push_str(&format!("directory {}\n", rust_pp_module_abs_path)[..]);
+ script_str.push_str(&format!("directory {}\n",
+ rust_pp_module_abs_path));
// Load the target executable
script_str.push_str(&format!("file {}\n",
- exe_file.as_str().unwrap().replace("\\", "\\\\"))[..]);
+ exe_file.to_str().unwrap()
+ .replace(r"\", r"\\")));
// Add line breakpoints
for line in &breakpoint_lines {
script_str.push_str(&format!("break '{}':{}\n",
- testfile.filename_display(),
- *line)[..]);
+ testfile.file_name().unwrap()
+ .to_string_lossy(),
+ *line));
}
script_str.push_str(&cmds);
- script_str.push_str("quit\n");
+ script_str.push_str("\nquit\n");
debug!("script_str = {}", script_str);
dump_output_file(config,
"debugger.script");
// run debugger script with gdb
- #[cfg(windows)]
- fn debugger() -> String {
- "gdb.exe".to_string()
- }
- #[cfg(unix)]
- fn debugger() -> String {
- "gdb".to_string()
+ fn debugger() -> &'static str {
+ if cfg!(windows) {"gdb.exe"} else {"gdb"}
}
let debugger_script = make_out_name(config, testfile, "debugger.script");
vec!("-quiet".to_string(),
"-batch".to_string(),
"-nx".to_string(),
- format!("-command={}", debugger_script.as_str().unwrap()));
+ format!("-command={}", debugger_script.to_str().unwrap()));
let proc_args = ProcArgs {
- prog: debugger(),
+ prog: debugger().to_string(),
args: debugger_opts,
};
check_debugger_output(&debugger_run_result, &check_lines);
}
-fn find_rust_src_root(config: &Config) -> Option<Path> {
+fn find_rust_src_root(config: &Config) -> Option<PathBuf> {
let mut path = config.src_base.clone();
let path_postfix = Path::new("src/etc/lldb_batchmode.py");
}
fn run_debuginfo_lldb_test(config: &Config, props: &TestProps, testfile: &Path) {
- use std::old_io::process::{Command, ProcessOutput};
-
if config.lldb_python_dir.is_none() {
fatal("Can't run LLDB test because LLDB's python path is not set.");
}
.expect("Could not find Rust source root");
let rust_pp_module_rel_path = Path::new("./src/etc/lldb_rust_formatters.py");
let rust_pp_module_abs_path = rust_src_root.join(rust_pp_module_rel_path)
- .as_str()
+ .to_str()
.unwrap()
.to_string();
- script_str.push_str(&format!("command script import {}\n", &rust_pp_module_abs_path[..])[..]);
+ script_str.push_str(&format!("command script import {}\n",
+ &rust_pp_module_abs_path[..])[..]);
script_str.push_str("type summary add --no-value ");
script_str.push_str("--python-function lldb_rust_formatters.print_val ");
script_str.push_str("-x \".*\" --category Rust\n");
}
// Finally, quit the debugger
- script_str.push_str("quit\n");
+ script_str.push_str("\nquit\n");
// Write the script into a file
debug!("script_str = {}", script_str);
rust_src_root: &Path)
-> ProcRes {
// Prepare the lldb_batchmode which executes the debugger script
- let lldb_script_path = rust_src_root.join(Path::new("./src/etc/lldb_batchmode.py"));
+ let lldb_script_path = rust_src_root.join("src/etc/lldb_batchmode.py");
let mut cmd = Command::new("python");
- cmd.arg(lldb_script_path)
+ cmd.arg(&lldb_script_path)
.arg(test_executable)
.arg(debugger_script)
- .env_set_all(&[("PYTHONPATH", config.lldb_python_dir.clone().unwrap())]);
-
- let (status, out, err) = match cmd.spawn() {
- Ok(process) => {
- let ProcessOutput { status, output, error } =
- process.wait_with_output().unwrap();
+ .env("PYTHONPATH", config.lldb_python_dir.as_ref().unwrap());
+ let (status, out, err) = match cmd.output() {
+ Ok(Output { status, stdout, stderr }) => {
(status,
- String::from_utf8(output).unwrap(),
- String::from_utf8(error).unwrap())
+ String::from_utf8(stdout).unwrap(),
+ String::from_utf8(stderr).unwrap())
},
Err(e) => {
fatal(&format!("Failed to setup Python process for \
dump_output(config, test_executable, &out, &err);
return ProcRes {
- status: status,
+ status: Status::Normal(status),
stdout: out,
stderr: err,
cmdline: format!("{:?}", cmd)
fn parse_debugger_commands(file_path: &Path, debugger_prefix: &str)
-> DebuggerCommands {
- use std::old_io::{BufferedReader, File};
-
let command_directive = format!("{}-command", debugger_prefix);
let check_directive = format!("{}-check", debugger_prefix);
let mut commands = vec!();
let mut check_lines = vec!();
let mut counter = 1;
- let mut reader = BufferedReader::new(File::open(file_path).unwrap());
+ let reader = BufReader::new(File::open(file_path).unwrap());
for line in reader.lines() {
match line {
Ok(line) => {
let prefixes = expected_errors.iter().map(|ee| {
format!("{}:{}:", testfile.display(), ee.line)
- }).collect::<Vec<String> >();
-
- #[cfg(windows)]
- fn prefix_matches( line : &str, prefix : &str ) -> bool {
- line.to_ascii_lowercase().starts_with(&prefix.to_ascii_lowercase())
- }
-
- #[cfg(unix)]
- fn prefix_matches( line : &str, prefix : &str ) -> bool {
- line.starts_with( prefix )
+ }).collect::<Vec<String>>();
+
+ fn prefix_matches(line: &str, prefix: &str) -> bool {
+ use std::ascii::AsciiExt;
+ // On windows just translate all '\' path separators to '/'
+ let line = line.replace(r"\", "/");
+ if cfg!(windows) {
+ line.to_ascii_lowercase().starts_with(&prefix.to_ascii_lowercase())
+ } else {
+ line.starts_with(prefix)
+ }
}
// A multi-line error will have followup lines which will always
}
struct ProcRes {
- status: ProcessExit,
+ status: Status,
stdout: String,
stderr: String,
cmdline: String,
}
+enum Status {
+ Parsed(i32),
+ Normal(ExitStatus),
+}
+
+impl Status {
+ fn code(&self) -> Option<i32> {
+ match *self {
+ Status::Parsed(i) => Some(i),
+ Status::Normal(ref e) => e.code(),
+ }
+ }
+
+ fn success(&self) -> bool {
+ match *self {
+ Status::Parsed(i) => i == 0,
+ Status::Normal(ref e) => e.success(),
+ }
+ }
+}
+
+impl fmt::Display for Status {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match *self {
+ Status::Parsed(i) => write!(f, "exit code: {}", i),
+ Status::Normal(ref e) => e.fmt(f),
+ }
+ }
+}
+
fn compile_test(config: &Config, props: &TestProps,
testfile: &Path) -> ProcRes {
compile_test_(config, props, testfile, &[])
let aux_dir = aux_output_dir_name(config, testfile);
// FIXME (#9639): This needs to handle non-utf8 paths
let mut link_args = vec!("-L".to_string(),
- aux_dir.as_str().unwrap().to_string());
+ aux_dir.to_str().unwrap().to_string());
link_args.extend(extra_args.iter().cloned());
let args = make_compile_args(config,
props,
make_run_args(config, props, testfile),
env,
&config.run_lib_path,
- Some(aux_dir.as_str().unwrap()),
+ Some(aux_dir.to_str().unwrap()),
None)
}
}
let aux_dir = aux_output_dir_name(config, testfile);
// FIXME (#9639): This needs to handle non-utf8 paths
- let extra_link_args = vec!("-L".to_string(), aux_dir.as_str().unwrap().to_string());
+ let extra_link_args = vec!("-L".to_string(), aux_dir.to_str().unwrap().to_string());
for rel_ab in &props.aux_builds {
let abs_ab = config.aux_base.join(rel_ab);
crate_type,
|a,b| {
let f = make_lib_name(a, b, testfile);
- TargetLocation::ThisDirectory(f.dir_path())
+ let parent = f.parent().unwrap();
+ TargetLocation::ThisDirectory(parent.to_path_buf())
},
&abs_ab);
let auxres = compose_and_run(config,
aux_args,
Vec::new(),
&config.compile_lib_path,
- Some(aux_dir.as_str().unwrap()),
+ Some(aux_dir.to_str().unwrap()),
None);
if !auxres.status.success() {
fatal_proc_rec(
args,
Vec::new(),
&config.compile_lib_path,
- Some(aux_dir.as_str().unwrap()),
+ Some(aux_dir.to_str().unwrap()),
input)
}
fn ensure_dir(path: &Path) {
if path.is_dir() { return; }
- fs::mkdir(path, old_io::USER_RWX).unwrap();
+ fs::create_dir(path).unwrap();
}
fn compose_and_run(config: &Config, testfile: &Path,
}
enum TargetLocation {
- ThisFile(Path),
- ThisDirectory(Path),
+ ThisFile(PathBuf),
+ ThisDirectory(PathBuf),
}
fn make_compile_args<F>(config: &Config,
&*config.target
};
// FIXME (#9639): This needs to handle non-utf8 paths
- let mut args = vec!(testfile.as_str().unwrap().to_string(),
+ let mut args = vec!(testfile.to_str().unwrap().to_string(),
"-L".to_string(),
- config.build_base.as_str().unwrap().to_string(),
+ config.build_base.to_str().unwrap().to_string(),
format!("--target={}", target));
args.push_all(&extras);
if !props.no_prefer_dynamic {
path
}
};
- args.push(path.as_str().unwrap().to_string());
+ args.push(path.to_str().unwrap().to_string());
if props.force_host {
args.extend(split_maybe_args(&config.host_rustcflags).into_iter());
} else {
}
args.extend(split_maybe_args(&props.compile_flags).into_iter());
return ProcArgs {
- prog: config.rustc_path.as_str().unwrap().to_string(),
+ prog: config.rustc_path.to_str().unwrap().to_string(),
args: args,
};
}
-fn make_lib_name(config: &Config, auxfile: &Path, testfile: &Path) -> Path {
+fn make_lib_name(config: &Config, auxfile: &Path, testfile: &Path) -> PathBuf {
// what we return here is not particularly important, as it
// happens; rustc ignores everything except for the directory.
let auxname = output_testname(auxfile);
aux_output_dir_name(config, testfile).join(&auxname)
}
-fn make_exe_name(config: &Config, testfile: &Path) -> Path {
+fn make_exe_name(config: &Config, testfile: &Path) -> PathBuf {
let mut f = output_base_name(config, testfile);
if !env::consts::EXE_SUFFIX.is_empty() {
- let mut fname = f.filename().unwrap().to_vec();
- fname.extend(env::consts::EXE_SUFFIX.bytes());
- f.set_filename(fname);
+ let mut fname = f.file_name().unwrap().to_os_string();
+ fname.push_os_str(OsStr::from_str(env::consts::EXE_SUFFIX));
+ f.set_file_name(&fname);
}
f
}
let exe_file = make_exe_name(config, testfile);
// FIXME (#9639): This needs to handle non-utf8 paths
- args.push(exe_file.as_str().unwrap().to_string());
+ args.push(exe_file.to_str().unwrap().to_string());
// Add the arguments in the run_flags directive
args.extend(split_maybe_args(&props.run_flags).into_iter());
input).expect(&format!("failed to exec `{}`", prog));
dump_output(config, testfile, &out, &err);
return ProcRes {
- status: status,
+ status: Status::Normal(status),
stdout: out,
stderr: err,
cmdline: cmdline,
};
}
-// Linux and mac don't require adjusting the library search path
-#[cfg(unix)]
-fn make_cmdline(_libpath: &str, prog: &str, args: &[String]) -> String {
- format!("{} {}", prog, args.connect(" "))
-}
-
-#[cfg(windows)]
fn make_cmdline(libpath: &str, prog: &str, args: &[String]) -> String {
+ use util;
- // Build the LD_LIBRARY_PATH variable as it would be seen on the command line
- // for diagnostic purposes
- fn lib_path_cmd_prefix(path: &str) -> String {
- format!("{}=\"{}\"", util::lib_path_env_var(), util::make_new_path(path))
- }
+ // Linux and mac don't require adjusting the library search path
+ if cfg!(unix) {
+ format!("{} {}", prog, args.connect(" "))
+ } else {
+ // Build the LD_LIBRARY_PATH variable as it would be seen on the command line
+ // for diagnostic purposes
+ fn lib_path_cmd_prefix(path: &str) -> String {
+ format!("{}=\"{}\"", util::lib_path_env_var(), util::make_new_path(path))
+ }
- format!("{} {} {}", lib_path_cmd_prefix(libpath), prog, args.connect(" "))
+ format!("{} {} {}", lib_path_cmd_prefix(libpath), prog, args.connect(" "))
+ }
}
fn dump_output(config: &Config, testfile: &Path, out: &str, err: &str) {
fn dump_output_file(config: &Config, testfile: &Path,
out: &str, extension: &str) {
let outfile = make_out_name(config, testfile, extension);
- File::create(&outfile).write_all(out.as_bytes()).unwrap();
+ File::create(&outfile).unwrap().write_all(out.as_bytes()).unwrap();
}
-fn make_out_name(config: &Config, testfile: &Path, extension: &str) -> Path {
+fn make_out_name(config: &Config, testfile: &Path, extension: &str) -> PathBuf {
output_base_name(config, testfile).with_extension(extension)
}
-fn aux_output_dir_name(config: &Config, testfile: &Path) -> Path {
+fn aux_output_dir_name(config: &Config, testfile: &Path) -> PathBuf {
let f = output_base_name(config, testfile);
- let mut fname = f.filename().unwrap().to_vec();
- fname.extend("libaux".bytes());
- f.with_filename(fname)
+ let mut fname = f.file_name().unwrap().to_os_string();
+ fname.push_os_str(OsStr::from_str("libaux"));
+ f.with_file_name(&fname)
}
-fn output_testname(testfile: &Path) -> Path {
- Path::new(testfile.filestem().unwrap())
+fn output_testname(testfile: &Path) -> PathBuf {
+ PathBuf::new(testfile.file_stem().unwrap())
}
-fn output_base_name(config: &Config, testfile: &Path) -> Path {
+fn output_base_name(config: &Config, testfile: &Path) -> PathBuf {
config.build_base
.join(&output_testname(testfile))
.with_extension(&config.stage_id)
Some("".to_string()))
.expect(&format!("failed to exec `{}`", config.adb_path));
- let mut exitcode: int = 0;
+ let mut exitcode: i32 = 0;
for c in exitcode_out.chars() {
if !c.is_numeric() { break; }
exitcode = exitcode * 10 + match c {
- '0' ... '9' => c as int - ('0' as int),
+ '0' ... '9' => c as i32 - ('0' as i32),
_ => 101,
}
}
&stderr_out);
ProcRes {
- status: process::ProcessExit::ExitStatus(exitcode),
+ status: Status::Parsed(exitcode),
stdout: stdout_out,
stderr: stderr_out,
cmdline: cmdline
fn _arm_push_aux_shared_library(config: &Config, testfile: &Path) {
let tdir = aux_output_dir_name(config, testfile);
- let dirs = fs::readdir(&tdir).unwrap();
- for file in &dirs {
- if file.extension_str() == Some("so") {
+ let dirs = fs::read_dir(&tdir).unwrap();
+ for file in dirs {
+ let file = file.unwrap().path();
+ if file.extension().and_then(|s| s.to_str()) == Some("so") {
// FIXME (#9639): This needs to handle non-utf8 paths
let copy_result = procsrv::run("",
&config.adb_path,
None,
&[
"push".to_string(),
- file.as_str()
+ file.to_str()
.unwrap()
.to_string(),
config.adb_test_dir.to_string(),
// codegen tests (vs. clang)
-fn append_suffix_to_stem(p: &Path, suffix: &str) -> Path {
+fn append_suffix_to_stem(p: &Path, suffix: &str) -> PathBuf {
if suffix.len() == 0 {
- (*p).clone()
+ p.to_path_buf()
} else {
- let mut stem = p.filestem().unwrap().to_vec();
- stem.extend("-".bytes());
- stem.extend(suffix.bytes());
- p.with_filename(stem)
+ let mut stem = p.file_stem().unwrap().to_os_string();
+ stem.push_os_str(OsStr::from_str("-"));
+ stem.push_os_str(OsStr::from_str(suffix));
+ p.with_file_name(&stem)
}
}
let aux_dir = aux_output_dir_name(config, testfile);
// FIXME (#9639): This needs to handle non-utf8 paths
let mut link_args = vec!("-L".to_string(),
- aux_dir.as_str().unwrap().to_string());
+ aux_dir.to_str().unwrap().to_string());
let llvm_args = vec!("--emit=llvm-bc,obj".to_string(),
"--crate-type=lib".to_string());
link_args.extend(llvm_args.into_iter());
props,
link_args,
|a, b| TargetLocation::ThisDirectory(
- output_base_name(a, b).dir_path()),
+ output_base_name(a, b).parent()
+ .unwrap().to_path_buf()),
testfile);
compose_and_run_compiler(config, props, testfile, args, None)
}
let testcc = testfile.with_extension("cc");
let proc_args = ProcArgs {
// FIXME (#9639): This needs to handle non-utf8 paths
- prog: config.clang_path.as_ref().unwrap().as_str().unwrap().to_string(),
+ prog: config.clang_path.as_ref().unwrap().to_str().unwrap().to_string(),
args: vec!("-c".to_string(),
"-emit-llvm".to_string(),
"-o".to_string(),
- bitcodefile.as_str().unwrap().to_string(),
- testcc.as_str().unwrap().to_string())
+ bitcodefile.to_str().unwrap().to_string(),
+ testcc.to_str().unwrap().to_string())
};
compose_and_run(config, testfile, proc_args, Vec::new(), "", None, None)
}
let prog = config.llvm_bin_path.as_ref().unwrap().join("llvm-extract");
let proc_args = ProcArgs {
// FIXME (#9639): This needs to handle non-utf8 paths
- prog: prog.as_str().unwrap().to_string(),
+ prog: prog.to_str().unwrap().to_string(),
args: vec!(format!("-func={}", fname),
- format!("-o={}", extracted_bc.as_str().unwrap()),
- bitcodefile.as_str().unwrap().to_string())
+ format!("-o={}", extracted_bc.to_str().unwrap()),
+ bitcodefile.to_str().unwrap().to_string())
};
compose_and_run(config, testfile, proc_args, Vec::new(), "", None, None)
}
let prog = config.llvm_bin_path.as_ref().unwrap().join("llvm-dis");
let proc_args = ProcArgs {
// FIXME (#9639): This needs to handle non-utf8 paths
- prog: prog.as_str().unwrap().to_string(),
- args: vec!(format!("-o={}", extracted_ll.as_str().unwrap()),
- extracted_bc.as_str().unwrap().to_string())
+ prog: prog.to_str().unwrap().to_string(),
+ args: vec!(format!("-o={}", extracted_ll.to_str().unwrap()),
+ extracted_bc.to_str().unwrap().to_string())
};
compose_and_run(config, testfile, proc_args, Vec::new(), "", None, None)
}
fn count_extracted_lines(p: &Path) -> uint {
- let x = File::open(&p.with_extension("ll")).read_to_end().unwrap();
+ let mut x = Vec::new();
+ File::open(&p.with_extension("ll")).unwrap().read_to_end(&mut x).unwrap();
let x = str::from_utf8(&x).unwrap();
x.lines().count()
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use common::Config;
-
-#[cfg(target_os = "windows")]
use std::env;
+use common::Config;
/// Conversion table from triple OS name to Rust SYSNAME
const OS_TABLE: &'static [(&'static str, &'static str)] = &[
panic!("Cannot determine OS from triple");
}
-#[cfg(target_os = "windows")]
pub fn make_new_path(path: &str) -> String {
-
+ assert!(cfg!(windows));
// Windows just uses PATH as the library search path, so we have to
// maintain the current value while adding our own
match env::var(lib_path_env_var()) {
- Ok(curr) => {
- format!("{}{}{}", path, path_div(), curr)
- }
- Err(..) => path.to_string()
+ Ok(curr) => {
+ format!("{}{}{}", path, path_div(), curr)
+ }
+ Err(..) => path.to_string()
}
}
-#[cfg(target_os = "windows")]
pub fn lib_path_env_var() -> &'static str { "PATH" }
-
-#[cfg(target_os = "windows")]
-pub fn path_div() -> &'static str { ";" }
+fn path_div() -> &'static str { ";" }
pub fn logv(config: &Config, s: String) {
debug!("{}", s);
let guards: Vec<_> = (0..3).map(|i| {
Thread::scoped(move || {
- for j in 0..3 { numbers[j] += 1 }
+ numbers[i] += 1;
+ println!("numbers[{}] is {}", i, numbers[i]);
});
}).collect();
}
It gives us this error:
```text
-7:29: 9:10 error: cannot move out of captured outer variable in an `FnMut` closure
-7 Thread::scoped(move || {
-8 for j in 0..3 { numbers[j] += 1 }
-9 });
+7:25: 10:6 error: cannot move out of captured outer variable in an `FnMut` closure
+7 Thread::scoped(move || {
+8 numbers[i] += 1;
+9 println!("numbers[{}] is {}", i, numbers[i]);
+10 });
+error: aborting due to previous error
```
It mentions that "captured outer variable in an `FnMut` closure".
* `quote_pat!`
* `quote_stmt!`
* `quote_tokens!`
+* `quote_matcher!`
* `quote_ty!`
+* `quote_attr!`
+
+Keep in mind that when `$name : ident` appears in the input to
+`quote_tokens!`, the result contains unquoted `name` followed by two tokens.
+However, input of the same form passed to `quote_matcher!` becomes a
+quasiquoted MBE-matcher of a nonterminal. No unquotation happens. Otherwise
+the result of `quote_matcher!` is identical to that of `quote_tokens!`.
Documentation is very limited at the moment.
types, e.g. as the return type of a public function.
This capability may be removed in the future.
+* `allow_internal_unstable` - Allows `macro_rules!` macros to be tagged with the
+ `#[allow_internal_unstable]` attribute, designed
+ to allow `std` macros to call
+ `#[unstable]`/feature-gated functionality
+ internally without imposing on callers
+ (i.e. making them behave like function calls in
+ terms of encapsulation).
+
If a feature is promoted to a language feature, then all existing programs will
start to receive compilation warnings about #[feature] directives which enabled
the new feature (because the directive is no longer necessary). However, if a
brackets `[]` with `vec!`. Rust allows you to use either in either situation,
this is just convention.)
+There's an alternate form of `vec!` for repeating an initial value:
+
+```
+let v = vec![0; 10]; // ten zeroes
+```
+
You can get the length of, iterate over, and subscript vectors just like
arrays. In addition, (mutable) vectors can grow automatically:
## Tuples
-The first compound data type we're going to talk about are called *tuples*.
-Tuples are an ordered list of a fixed size. Like this:
+The first compound data type we're going to talk about is called the *tuple*.
+A tuple is an ordered list of fixed size. Like this:
```rust
let x = (1, "hello");
```
An `enum` variant can be defined as most normal types. Below are some example
-types have been listed which also would be allowed in an `enum`.
+types which also would be allowed in an `enum`.
```rust
struct Empty;
import subprocess
import re
import os
-from licenseck import *
+from licenseck import check_license
import snapshot
err = 0
tab_flag = "ignore-tidy-tab"
linelength_flag = "ignore-tidy-linelength"
-# Be careful to support Python 2.4, 2.6, and 3.x here!
-config_proc = subprocess.Popen(["git", "config", "core.autocrlf"],
- stdout=subprocess.PIPE)
-result = config_proc.communicate()[0]
-
-true = "true".encode('utf8')
-autocrlf = result.strip() == true if result is not None else False
+interesting_files = ['.rs', '.py', '.js', '.sh', '.c', '.h']
+uninteresting_files = ['miniz.c', 'jquery', 'rust_android_dummy']
def report_error_name_no(name, no, s):
if not check_license(name, contents):
report_error_name_no(name, 1, "incorrect license")
+
+def update_counts(current_name):
+ global file_counts
+ global count_other_linted_files
+
+ _, ext = os.path.splitext(current_name)
+
+ if ext in interesting_files:
+ file_counts[ext] += 1
+ else:
+ count_other_linted_files += 1
+
+
+def interesting_file(f):
+ if any(x in f for x in uninteresting_files):
+ return False
+
+ return any(os.path.splitext(f)[1] == ext for ext in interesting_files)
+
+
+# Be careful to support Python 2.4, 2.6, and 3.x here!
+config_proc = subprocess.Popen(["git", "config", "core.autocrlf"],
+ stdout=subprocess.PIPE)
+result = config_proc.communicate()[0]
+
+true = "true".encode('utf8')
+autocrlf = result.strip() == true if result is not None else False
+
current_name = ""
current_contents = ""
check_tab = True
src_dir = sys.argv[1]
-try:
- count_lines = 0
- count_non_blank_lines = 0
+count_lines = 0
+count_non_blank_lines = 0
+count_other_linted_files = 0
- interesting_files = ['.rs', '.py', '.js', '.sh', '.c', '.h']
+file_counts = {ext: 0 for ext in interesting_files}
- file_counts = {ext: 0 for ext in interesting_files}
- file_counts['other'] = 0
-
- def update_counts(current_name):
- global file_counts
- _, ext = os.path.splitext(current_name)
-
- if ext in file_counts:
- file_counts[ext] += 1
- else:
- file_counts['other'] += 1
-
- all_paths = set()
+all_paths = set()
+try:
for (dirpath, dirnames, filenames) in os.walk(src_dir):
-
# Skip some third-party directories
skippable_dirs = {
'src/jemalloc',
if any(d in dirpath for d in skippable_dirs):
continue
- def interesting_file(f):
- if "miniz.c" in f \
- or "jquery" in f \
- or "rust_android_dummy" in f:
- return False
-
- return any(os.path.splitext(f)[1] == ext for ext in interesting_files)
-
file_names = [os.path.join(dirpath, f) for f in filenames
if interesting_file(f)
and not f.endswith("_gen.rs")
report_err("UTF-8 decoding error " + str(e))
print
-for ext in file_counts:
- print "* linted " + str(file_counts[ext]) + " " + ext + " files"
-print "* total lines of code: " + str(count_lines)
-print "* total non-blank lines of code: " + str(count_non_blank_lines)
+for ext in sorted(file_counts, key=file_counts.get, reverse=True):
+ print "* linted {} {} files".format(file_counts[ext], ext)
+print "* linted {} other files".format(count_other_linted_files)
+print "* total lines of code: {}".format(count_lines)
+print "* total non-blank lines of code: {}".format(count_non_blank_lines)
print
sys.exit(err)
sys.stderr.write("cannot load %s" % f)
exit(1)
-def is_valid_unicode(n):
- return 0 <= n <= 0xD7FF or 0xE000 <= n <= 0x10FFFF
+def is_surrogate(n):
+ return 0xD800 <= n <= 0xDFFF
def load_unicode_data(f):
fetch(f)
canon_decomp = {}
compat_decomp = {}
+ udict = {};
+ range_start = -1;
for line in fileinput.input(f):
- fields = line.split(";")
- if len(fields) != 15:
+ data = line.split(';');
+ if len(data) != 15:
continue
- [code, name, gencat, combine, bidi,
- decomp, deci, digit, num, mirror,
- old, iso, upcase, lowcase, titlecase ] = fields
-
- code_org = code
- code = int(code, 16)
-
- if not is_valid_unicode(code):
+ cp = int(data[0], 16);
+ if is_surrogate(cp):
continue
+ if range_start >= 0:
+ for i in xrange(range_start, cp):
+ udict[i] = data;
+ range_start = -1;
+ if data[1].endswith(", First>"):
+ range_start = cp;
+ continue;
+ udict[cp] = data;
+
+ for code in udict:
+ [code_org, name, gencat, combine, bidi,
+ decomp, deci, digit, num, mirror,
+ old, iso, upcase, lowcase, titlecase ] = udict[code];
# generate char to char direct common and simple conversions
# uppercase to lowercase
%precedence MOD_SEP
%precedence RARROW ':'
+// In where clauses, "for" should have greater precedence when used as
+// a higher ranked constraint than when used as the beginning of a
+// for_in_type (which is a ty)
+%precedence FORTYPE
+%precedence FOR
+
// Binops & unops, and their precedences
%precedence BOX
%precedence BOXPLACE
{
$$ = mk_node("ItemImplNeg", 7, $1, $3, $5, $7, $8, $10, $11);
}
+| maybe_unsafe IMPL generic_params trait_ref FOR DOTDOT '{' '}'
+{
+ $$ = mk_node("ItemImplDefault", 3, $1, $3, $4);
+}
+| maybe_unsafe IMPL generic_params '!' trait_ref FOR DOTDOT '{' '}'
+{
+ $$ = mk_node("ItemImplDefaultNeg", 3, $1, $3, $4);
+}
;
maybe_impl_items
;
where_predicate
-: lifetime ':' bounds { $$ = mk_node("WherePredicate", 2, $1, $3); }
-| ty ':' ty_param_bounds { $$ = mk_node("WherePredicate", 2, $1, $3); }
+: maybe_for_lifetimes lifetime ':' bounds { $$ = mk_node("WherePredicate", 3, $1, $2, $4); }
+| maybe_for_lifetimes ty ':' ty_param_bounds { $$ = mk_node("WherePredicate", 3, $1, $2, $4); }
;
+maybe_for_lifetimes
+: FOR '<' lifetimes '>' { $$ = mk_none(); }
+| %prec FORTYPE %empty { $$ = mk_none(); }
+;
+
ty_params
: ty_param { $$ = mk_node("TyParams", 1, $1); }
| ty_params ',' ty_param { $$ = ext_node($1, 1, $3); }
}
| ty_qualified_path ',' ty_sums maybe_bindings
{
- $$ = mk_node("GenericValues", 3, mk_none(), ext_node(mk_node("TySums", 1, $1), 1, $3), $4); }
+ $$ = mk_node("GenericValues", 3, mk_none(), mk_node("TySums", 2, $1, $3), $4);
+}
;
ty_qualified_path
;
expr_qualified_path
-: '<' ty_sum AS trait_ref '>' MOD_SEP ident
+: '<' ty_sum maybe_as_trait_ref '>' MOD_SEP ident
{
- $$ = mk_node("ExprQualifiedPath", 3, $2, $4, $7);
+ $$ = mk_node("ExprQualifiedPath", 3, $2, $3, $6);
}
-| '<' ty_sum AS trait_ref '>' MOD_SEP ident generic_args
+| '<' ty_sum maybe_as_trait_ref '>' MOD_SEP ident generic_args
{
- $$ = mk_node("ExprQualifiedPath", 4, $2, $4, $7, $8);
+ $$ = mk_node("ExprQualifiedPath", 4, $2, $3, $6, $7);
}
-| SHL ty_sum AS trait_ref '>' MOD_SEP ident AS trait_ref '>' MOD_SEP ident
+| SHL ty_sum maybe_as_trait_ref '>' MOD_SEP ident maybe_as_trait_ref '>' MOD_SEP ident
{
- $$ = mk_node("ExprQualifiedPath", 3, mk_node("ExprQualifiedPath", 3, $2, $4, $7), $9, $12);
+ $$ = mk_node("ExprQualifiedPath", 3, mk_node("ExprQualifiedPath", 3, $2, $3, $6), $7, $10);
}
-| SHL ty_sum AS trait_ref '>' MOD_SEP ident generic_args AS trait_ref '>' MOD_SEP ident
+| SHL ty_sum maybe_as_trait_ref '>' MOD_SEP ident generic_args maybe_as_trait_ref '>' MOD_SEP ident
{
- $$ = mk_node("ExprQualifiedPath", 3, mk_node("ExprQualifiedPath", 4, $2, $4, $7, $8), $10, $13);
+ $$ = mk_node("ExprQualifiedPath", 3, mk_node("ExprQualifiedPath", 4, $2, $3, $6, $7), $8, $11);
}
-| SHL ty_sum AS trait_ref '>' MOD_SEP ident AS trait_ref '>' MOD_SEP ident generic_args
+| SHL ty_sum maybe_as_trait_ref '>' MOD_SEP ident maybe_as_trait_ref '>' MOD_SEP ident generic_args
{
- $$ = mk_node("ExprQualifiedPath", 4, mk_node("ExprQualifiedPath", 3, $2, $4, $7), $9, $12, $13);
+ $$ = mk_node("ExprQualifiedPath", 4, mk_node("ExprQualifiedPath", 3, $2, $3, $6), $7, $10, $11);
}
-| SHL ty_sum AS trait_ref '>' MOD_SEP ident generic_args AS trait_ref '>' MOD_SEP ident generic_args
+| SHL ty_sum maybe_as_trait_ref '>' MOD_SEP ident generic_args maybe_as_trait_ref '>' MOD_SEP ident generic_args
{
- $$ = mk_node("ExprQualifiedPath", 4, mk_node("ExprQualifiedPath", 4, $2, $4, $7, $8), $10, $13, $14);
+ $$ = mk_node("ExprQualifiedPath", 4, mk_node("ExprQualifiedPath", 4, $2, $3, $6, $7), $8, $11, $12);
}
+maybe_as_trait_ref
+: AS trait_ref { $$ = $2; }
+| %empty { $$ = mk_none(); }
+;
lambda_expr
: %prec LAMBDA
-Subproject commit b001609960ca33047e5cbc5a231c1e24b6041d4b
+Subproject commit e24a1a025a1f214e40eedafe3b9c7b1d69937922
//! }
//! ```
//!
-//! This will print `Cons(1i32, Box(Cons(2i32, Box(Nil))))`.
+//! This will print `Cons(1, Box(Cons(2, Box(Nil))))`.
#![stable(feature = "rust1", since = "1.0.0")]
#[test]
fn deref() {
fn homura<T: Deref<Target=i32>>(_: T) { }
- homura(Box::new(765i32));
+ homura(Box::new(765));
}
#[test]
fn raw_sized() {
unsafe {
- let x = Box::new(17i32);
+ let x = Box::new(17);
let p = boxed::into_raw(x);
assert_eq!(17, *p);
*p = 19;
// have to uselessly pretend to pad the longer one for type matching
if a_len < b_len {
- (a.blocks().enumerate().chain(iter::repeat(0u32).enumerate().take(b_len).skip(a_len)),
- b.blocks().enumerate().chain(iter::repeat(0u32).enumerate().take(0).skip(0)))
+ (a.blocks().enumerate().chain(iter::repeat(0).enumerate().take(b_len).skip(a_len)),
+ b.blocks().enumerate().chain(iter::repeat(0).enumerate().take(0).skip(0)))
} else {
- (a.blocks().enumerate().chain(iter::repeat(0u32).enumerate().take(0).skip(0)),
- b.blocks().enumerate().chain(iter::repeat(0u32).enumerate().take(a_len).skip(b_len)))
+ (a.blocks().enumerate().chain(iter::repeat(0).enumerate().take(0).skip(0)),
+ b.blocks().enumerate().chain(iter::repeat(0).enumerate().take(a_len).skip(b_len)))
}
}
/// Computes the bitmask for the final word of the vector
fn mask_for_bits(bits: usize) -> u32 {
// Note especially that a perfect multiple of u32::BITS should mask all 1s.
- !0u32 >> (u32::BITS as usize - bits % u32::BITS as usize) % u32::BITS as usize
+ !0 >> (u32::BITS as usize - bits % u32::BITS as usize) % u32::BITS as usize
}
impl BitVec {
pub fn from_elem(nbits: usize, bit: bool) -> BitVec {
let nblocks = blocks_for_bits(nbits);
let mut bit_vec = BitVec {
- storage: repeat(if bit { !0u32 } else { 0u32 }).take(nblocks).collect(),
+ storage: repeat(if bit { !0 } else { 0 }).take(nblocks).collect(),
nbits: nbits
};
bit_vec.fix_last_block();
}
if extra_bytes > 0 {
- let mut last_word = 0u32;
+ let mut last_word = 0;
for (i, &byte) in bytes[complete_words*4..].iter().enumerate() {
last_word |= (reverse_bits(byte) as u32) << (i * 8);
}
/// ```
#[inline]
pub fn set_all(&mut self) {
- for w in &mut self.storage { *w = !0u32; }
+ for w in &mut self.storage { *w = !0; }
self.fix_last_block();
}
/// assert_eq!(bv.all(), false);
/// ```
pub fn all(&self) -> bool {
- let mut last_word = !0u32;
+ let mut last_word = !0;
// Check that every block but the last is all-ones...
self.blocks().all(|elem| {
let tmp = last_word;
last_word = elem;
- tmp == !0u32
+ tmp == !0
// and then check the last one has enough ones
}) && (last_word == mask_for_bits(self.nbits))
}
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn clear(&mut self) {
- for w in &mut self.storage { *w = 0u32; }
+ for w in &mut self.storage { *w = 0; }
}
}
assert_eq!(bit_vec.iter().collect::<Vec<bool>>(), bools);
- let long: Vec<_> = (0i32..10000).map(|i| i % 2 == 0).collect();
+ let long: Vec<_> = (0..10000).map(|i| i % 2 == 0).collect();
let bit_vec: BitVec = long.iter().map(|n| *n).collect();
assert_eq!(bit_vec.iter().collect::<Vec<bool>>(), long)
}
//! Some examples of the output from both traits:
//!
//! ```
-//! assert_eq!(format!("{} {:?}", 3i32, 4i32), "3 4");
+//! assert_eq!(format!("{} {:?}", 3, 4), "3 4");
//! assert_eq!(format!("{} {:?}", 'a', 'b'), "a 'b'");
//! assert_eq!(format!("{} {:?}", "foo\n", "bar\n"), "foo\n \"bar\\n\"");
//! ```
fn test_connect() {
let v: [Vec<i32>; 0] = [];
assert_eq!(v.connect(&0), []);
- assert_eq!([vec![1i], vec![2, 3]].connect(&0), [1, 0, 2, 3]);
- assert_eq!([vec![1i], vec![2], vec![3]].connect(&0), [1, 0, 2, 0, 3]);
+ assert_eq!([vec![1], vec![2, 3]].connect(&0), [1, 0, 2, 3]);
+ assert_eq!([vec![1], vec![2], vec![3]].connect(&0), [1, 0, 2, 0, 3]);
let v: [&[_]; 2] = [&[1], &[2, 3]];
assert_eq!(v.connect(&0), [1, 0, 2, 3]);
#[test]
fn test_bytes_set_memory() {
use slice::bytes::MutableByteVector;
- let mut values = [1u8,2,3,4,5];
+ let mut values = [1,2,3,4,5];
values[0..5].set_memory(0xAB);
assert!(values == [0xAB, 0xAB, 0xAB, 0xAB, 0xAB]);
values[2..4].set_memory(0xFF);
fn test_mut_chunks() {
use core::iter::ExactSizeIterator;
- let mut v = [0u8, 1, 2, 3, 4, 5, 6];
+ let mut v = [0, 1, 2, 3, 4, 5, 6];
assert_eq!(v.chunks_mut(2).len(), 4);
for (i, chunk) in v.chunks_mut(3).enumerate() {
for x in chunk {
*x = i as u8;
}
}
- let result = [0u8, 0, 0, 1, 1, 1, 2];
+ let result = [0, 0, 0, 1, 1, 1, 2];
assert!(v == result);
}
#[test]
fn test_mut_chunks_rev() {
- let mut v = [0u8, 1, 2, 3, 4, 5, 6];
+ let mut v = [0, 1, 2, 3, 4, 5, 6];
for (i, chunk) in v.chunks_mut(3).rev().enumerate() {
for x in chunk {
*x = i as u8;
}
}
- let result = [2u8, 2, 2, 1, 1, 1, 0];
+ let result = [2, 2, 2, 1, 1, 1, 0];
assert!(v == result);
}
//
// ignore-lexer-test FIXME #15679
-//! Unicode string manipulation (the `str` type).
+//! Unicode string manipulation (the [`str`](../primitive.str.html) type).
//!
-//! Rust's `str` type is one of the core primitive types of the language. `&str` is the borrowed
-//! string type. This type of string can only be created from other strings, unless it is a static
-//! string (see below). As the word "borrowed" implies, this type of string is owned elsewhere, and
-//! this string cannot be moved out of.
+//! Rust's [`str`](../primitive.str.html) type is one of the core primitive types of the
+//! language. `&str` is the borrowed string type. This type of string can only be created
+//! from other strings, unless it is a `&'static str` (see below). It is not possible to
+//! move out of borrowed strings because they are owned elsewhere.
+//!
+//! Basic operations are implemented directly by the compiler, but more advanced operations are
+//! defined on the [`StrExt`](trait.StrExt.html) trait.
//!
//! # Examples
//!
// return the value of $ch updated with continuation byte $byte
macro_rules! utf8_acc_cont_byte {
- ($ch:expr, $byte:expr) => (($ch << 6) | ($byte & 63u8) as u32)
+ ($ch:expr, $byte:expr) => (($ch << 6) | ($byte & 63) as u32)
}
#[stable(feature = "rust1", since = "1.0.0")]
#[test]
fn test_chars_decoding() {
- let mut bytes = [0u8; 4];
- for c in (0u32..0x110000).filter_map(|c| ::core::char::from_u32(c)) {
+ let mut bytes = [0; 4];
+ for c in (0..0x110000).filter_map(|c| ::core::char::from_u32(c)) {
let len = c.encode_utf8(&mut bytes).unwrap_or(0);
let s = ::core::str::from_utf8(&bytes[..len]).unwrap();
if Some(c) != s.chars().next() {
#[test]
fn test_chars_rev_decoding() {
- let mut bytes = [0u8; 4];
- for c in (0u32..0x110000).filter_map(|c| ::core::char::from_u32(c)) {
+ let mut bytes = [0; 4];
+ for c in (0..0x110000).filter_map(|c| ::core::char::from_u32(c)) {
let len = c.encode_utf8(&mut bytes).unwrap_or(0);
let s = ::core::str::from_utf8(&bytes[..len]).unwrap();
if Some(c) != s.chars().rev().next() {
}
}
- const TAG_CONT_U8: u8 = 128u8;
+ const TAG_CONT_U8: u8 = 128;
const REPLACEMENT: &'static [u8] = b"\xEF\xBF\xBD"; // U+FFFD in UTF-8
let total = v.len();
fn unsafe_get(xs: &[u8], i: usize) -> u8 {
}
})}
- if byte < 128u8 {
+ if byte < 128 {
// subseqidx handles this
} else {
let w = unicode_str::utf8_char_width(byte);
match w {
2 => {
- if safe_get(v, i, total) & 192u8 != TAG_CONT_U8 {
+ if safe_get(v, i, total) & 192 != TAG_CONT_U8 {
error!();
continue;
}
}
}
i += 1;
- if safe_get(v, i, total) & 192u8 != TAG_CONT_U8 {
+ if safe_get(v, i, total) & 192 != TAG_CONT_U8 {
error!();
continue;
}
}
}
i += 1;
- if safe_get(v, i, total) & 192u8 != TAG_CONT_U8 {
+ if safe_get(v, i, total) & 192 != TAG_CONT_U8 {
error!();
continue;
}
i += 1;
- if safe_get(v, i, total) & 192u8 != TAG_CONT_U8 {
+ if safe_get(v, i, total) & 192 != TAG_CONT_U8 {
error!();
continue;
}
fn test_from_utf16() {
let pairs =
[(String::from_str("𐍅𐌿𐌻𐍆𐌹𐌻𐌰\n"),
- vec![0xd800_u16, 0xdf45_u16, 0xd800_u16, 0xdf3f_u16,
- 0xd800_u16, 0xdf3b_u16, 0xd800_u16, 0xdf46_u16,
- 0xd800_u16, 0xdf39_u16, 0xd800_u16, 0xdf3b_u16,
- 0xd800_u16, 0xdf30_u16, 0x000a_u16]),
+ vec![0xd800, 0xdf45, 0xd800, 0xdf3f,
+ 0xd800, 0xdf3b, 0xd800, 0xdf46,
+ 0xd800, 0xdf39, 0xd800, 0xdf3b,
+ 0xd800, 0xdf30, 0x000a]),
(String::from_str("𐐒𐑉𐐮𐑀𐐲𐑋 𐐏𐐲𐑍\n"),
- vec![0xd801_u16, 0xdc12_u16, 0xd801_u16,
- 0xdc49_u16, 0xd801_u16, 0xdc2e_u16, 0xd801_u16,
- 0xdc40_u16, 0xd801_u16, 0xdc32_u16, 0xd801_u16,
- 0xdc4b_u16, 0x0020_u16, 0xd801_u16, 0xdc0f_u16,
- 0xd801_u16, 0xdc32_u16, 0xd801_u16, 0xdc4d_u16,
- 0x000a_u16]),
+ vec![0xd801, 0xdc12, 0xd801,
+ 0xdc49, 0xd801, 0xdc2e, 0xd801,
+ 0xdc40, 0xd801, 0xdc32, 0xd801,
+ 0xdc4b, 0x0020, 0xd801, 0xdc0f,
+ 0xd801, 0xdc32, 0xd801, 0xdc4d,
+ 0x000a]),
(String::from_str("𐌀𐌖𐌋𐌄𐌑𐌉·𐌌𐌄𐌕𐌄𐌋𐌉𐌑\n"),
- vec![0xd800_u16, 0xdf00_u16, 0xd800_u16, 0xdf16_u16,
- 0xd800_u16, 0xdf0b_u16, 0xd800_u16, 0xdf04_u16,
- 0xd800_u16, 0xdf11_u16, 0xd800_u16, 0xdf09_u16,
- 0x00b7_u16, 0xd800_u16, 0xdf0c_u16, 0xd800_u16,
- 0xdf04_u16, 0xd800_u16, 0xdf15_u16, 0xd800_u16,
- 0xdf04_u16, 0xd800_u16, 0xdf0b_u16, 0xd800_u16,
- 0xdf09_u16, 0xd800_u16, 0xdf11_u16, 0x000a_u16 ]),
+ vec![0xd800, 0xdf00, 0xd800, 0xdf16,
+ 0xd800, 0xdf0b, 0xd800, 0xdf04,
+ 0xd800, 0xdf11, 0xd800, 0xdf09,
+ 0x00b7, 0xd800, 0xdf0c, 0xd800,
+ 0xdf04, 0xd800, 0xdf15, 0xd800,
+ 0xdf04, 0xd800, 0xdf0b, 0xd800,
+ 0xdf09, 0xd800, 0xdf11, 0x000a ]),
(String::from_str("𐒋𐒘𐒈𐒑𐒛𐒒 𐒕𐒓 𐒈𐒚𐒍 𐒏𐒜𐒒𐒖𐒆 𐒕𐒆\n"),
- vec![0xd801_u16, 0xdc8b_u16, 0xd801_u16, 0xdc98_u16,
- 0xd801_u16, 0xdc88_u16, 0xd801_u16, 0xdc91_u16,
- 0xd801_u16, 0xdc9b_u16, 0xd801_u16, 0xdc92_u16,
- 0x0020_u16, 0xd801_u16, 0xdc95_u16, 0xd801_u16,
- 0xdc93_u16, 0x0020_u16, 0xd801_u16, 0xdc88_u16,
- 0xd801_u16, 0xdc9a_u16, 0xd801_u16, 0xdc8d_u16,
- 0x0020_u16, 0xd801_u16, 0xdc8f_u16, 0xd801_u16,
- 0xdc9c_u16, 0xd801_u16, 0xdc92_u16, 0xd801_u16,
- 0xdc96_u16, 0xd801_u16, 0xdc86_u16, 0x0020_u16,
- 0xd801_u16, 0xdc95_u16, 0xd801_u16, 0xdc86_u16,
- 0x000a_u16 ]),
+ vec![0xd801, 0xdc8b, 0xd801, 0xdc98,
+ 0xd801, 0xdc88, 0xd801, 0xdc91,
+ 0xd801, 0xdc9b, 0xd801, 0xdc92,
+ 0x0020, 0xd801, 0xdc95, 0xd801,
+ 0xdc93, 0x0020, 0xd801, 0xdc88,
+ 0xd801, 0xdc9a, 0xd801, 0xdc8d,
+ 0x0020, 0xd801, 0xdc8f, 0xd801,
+ 0xdc9c, 0xd801, 0xdc92, 0xd801,
+ 0xdc96, 0xd801, 0xdc86, 0x0020,
+ 0xd801, 0xdc95, 0xd801, 0xdc86,
+ 0x000a ]),
// Issue #12318, even-numbered non-BMP planes
(String::from_str("\u{20000}"),
vec![0xD840, 0xDC00])];
assert_eq!(1.to_string(), "1");
assert_eq!((-1).to_string(), "-1");
assert_eq!(200.to_string(), "200");
- assert_eq!(2u8.to_string(), "2");
+ assert_eq!(2.to_string(), "2");
assert_eq!(true.to_string(), "true");
assert_eq!(false.to_string(), "false");
assert_eq!(("hi".to_string()).to_string(), "hi");
#[bench]
fn from_utf8_lossy_100_invalid(b: &mut Bencher) {
- let s = repeat(0xf5u8).take(100).collect::<Vec<_>>();
+ let s = repeat(0xf5).take(100).collect::<Vec<_>>();
b.iter(|| {
let _ = String::from_utf8_lossy(&s);
});
use slice::SliceExt;
// UTF-8 ranges and tags for encoding characters
-const TAG_CONT: u8 = 0b1000_0000u8;
-const TAG_TWO_B: u8 = 0b1100_0000u8;
-const TAG_THREE_B: u8 = 0b1110_0000u8;
-const TAG_FOUR_B: u8 = 0b1111_0000u8;
-const MAX_ONE_B: u32 = 0x80u32;
-const MAX_TWO_B: u32 = 0x800u32;
-const MAX_THREE_B: u32 = 0x10000u32;
+const TAG_CONT: u8 = 0b1000_0000;
+const TAG_TWO_B: u8 = 0b1100_0000;
+const TAG_THREE_B: u8 = 0b1110_0000;
+const TAG_FOUR_B: u8 = 0b1111_0000;
+const MAX_ONE_B: u32 = 0x80;
+const MAX_TWO_B: u32 = 0x800;
+const MAX_THREE_B: u32 = 0x10000;
/*
Lu Uppercase_Letter an uppercase letter
#[stable(feature = "rust1", since = "1.0.0")]
fn len_utf16(self) -> usize {
let ch = self as u32;
- if (ch & 0xFFFF_u32) == ch { 1 } else { 2 }
+ if (ch & 0xFFFF) == ch { 1 } else { 2 }
}
#[inline]
dst[0] = code as u8;
Some(1)
} else if code < MAX_TWO_B && dst.len() >= 2 {
- dst[0] = (code >> 6 & 0x1F_u32) as u8 | TAG_TWO_B;
- dst[1] = (code & 0x3F_u32) as u8 | TAG_CONT;
+ dst[0] = (code >> 6 & 0x1F) as u8 | TAG_TWO_B;
+ dst[1] = (code & 0x3F) as u8 | TAG_CONT;
Some(2)
} else if code < MAX_THREE_B && dst.len() >= 3 {
- dst[0] = (code >> 12 & 0x0F_u32) as u8 | TAG_THREE_B;
- dst[1] = (code >> 6 & 0x3F_u32) as u8 | TAG_CONT;
- dst[2] = (code & 0x3F_u32) as u8 | TAG_CONT;
+ dst[0] = (code >> 12 & 0x0F) as u8 | TAG_THREE_B;
+ dst[1] = (code >> 6 & 0x3F) as u8 | TAG_CONT;
+ dst[2] = (code & 0x3F) as u8 | TAG_CONT;
Some(3)
} else if dst.len() >= 4 {
- dst[0] = (code >> 18 & 0x07_u32) as u8 | TAG_FOUR_B;
- dst[1] = (code >> 12 & 0x3F_u32) as u8 | TAG_CONT;
- dst[2] = (code >> 6 & 0x3F_u32) as u8 | TAG_CONT;
- dst[3] = (code & 0x3F_u32) as u8 | TAG_CONT;
+ dst[0] = (code >> 18 & 0x07) as u8 | TAG_FOUR_B;
+ dst[1] = (code >> 12 & 0x3F) as u8 | TAG_CONT;
+ dst[2] = (code >> 6 & 0x3F) as u8 | TAG_CONT;
+ dst[3] = (code & 0x3F) as u8 | TAG_CONT;
Some(4)
} else {
None
#[unstable(feature = "core")]
pub fn encode_utf16_raw(mut ch: u32, dst: &mut [u16]) -> Option<usize> {
// Marked #[inline] to allow llvm optimizing it away
- if (ch & 0xFFFF_u32) == ch && dst.len() >= 1 {
+ if (ch & 0xFFFF) == ch && dst.len() >= 1 {
// The BMP falls through (assuming non-surrogate, as it should)
dst[0] = ch as u16;
Some(1)
} else if dst.len() >= 2 {
// Supplementary planes break into surrogates.
- ch -= 0x1_0000_u32;
- dst[0] = 0xD800_u16 | ((ch >> 10) as u16);
- dst[1] = 0xDC00_u16 | ((ch as u16) & 0x3FF_u16);
+ ch -= 0x1_0000;
+ dst[0] = 0xD800 | ((ch >> 10) as u16);
+ dst[1] = 0xDC00 | ((ch as u16) & 0x3FF);
Some(2)
} else {
None
// For an f64 the exponent is in the range of [-1022, 1023] for base 2, so
// we may have up to that many digits. Give ourselves some extra wiggle room
// otherwise as well.
- let mut buf = [0u8; 1536];
+ let mut buf = [0; 1536];
let mut end = 0;
let radix_gen: T = cast(radix as int).unwrap();
let (num, exp) = match exp_format {
- ExpNone => (num, 0i32),
- ExpDec if num == _0 => (num, 0i32),
+ ExpNone => (num, 0),
+ ExpDec if num == _0 => (num, 0),
ExpDec => {
let (exp, exp_base) = match exp_format {
ExpDec => (num.abs().log10().floor(), cast::<f64, T>(10.0f64).unwrap()),
Alignment::Center => (padding / 2, (padding + 1) / 2),
};
- let mut fill = [0u8; 4];
+ let mut fill = [0; 4];
let len = self.fill.encode_utf8(&mut fill).unwrap_or(0);
let fill = unsafe { str::from_utf8_unchecked(&fill[..len]) };
#[stable(feature = "rust1", since = "1.0.0")]
impl Display for char {
fn fmt(&self, f: &mut Formatter) -> Result {
- let mut utf8 = [0u8; 4];
+ let mut utf8 = [0; 4];
let amt = self.encode_utf8(&mut utf8).unwrap_or(0);
let s: &str = unsafe { mem::transmute(&utf8[..amt]) };
Display::fmt(s, f)
// characters for a base 2 number.
let zero = Int::zero();
let is_positive = x >= zero;
- let mut buf = [0u8; 64];
+ let mut buf = [0; 64];
let mut curr = buf.len();
let base = cast(self.base()).unwrap();
if is_positive {
($buf:expr, $i:expr, $len:expr) =>
({
let mut t = 0;
- let mut out = 0u64;
+ let mut out = 0;
while t < $len {
out |= ($buf[t+$i] as u64) << t*8;
t += 1;
/// use std::mem;
///
/// let v: &[u8] = unsafe { mem::transmute("L") };
- /// assert!(v == [76u8]);
+ /// assert!(v == [76]);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn transmute<T,U>(e: T) -> U;
/// ```
/// use std::iter::AdditiveIterator;
///
- /// let a = [1i32, 2, 3, 4, 5];
+ /// let a = [1, 2, 3, 4, 5];
/// let mut it = a.iter().cloned();
/// assert!(it.sum() == 15);
/// ```
f: F,
}
-impl<I: Iterator, F, B> Map<I, F> where F: FnMut(I::Item) -> B {
- #[inline]
- fn do_map(&mut self, elt: Option<I::Item>) -> Option<B> {
- match elt {
- Some(a) => Some((self.f)(a)),
- _ => None
- }
- }
-}
-
#[stable(feature = "rust1", since = "1.0.0")]
impl<B, I: Iterator, F> Iterator for Map<I, F> where F: FnMut(I::Item) -> B {
type Item = B;
#[inline]
fn next(&mut self) -> Option<B> {
- let next = self.iter.next();
- self.do_map(next)
+ self.iter.next().map(|a| (self.f)(a))
}
#[inline]
{
#[inline]
fn next_back(&mut self) -> Option<B> {
- let next = self.iter.next_back();
- self.do_map(next)
+ self.iter.next_back().map(|a| (self.f)(a))
}
}
#[inline]
fn idx(&mut self, index: usize) -> Option<B> {
- let elt = self.iter.idx(index);
- self.do_map(elt)
+ self.iter.idx(index).map(|a| (self.f)(a))
}
}
/// This will invoke the `panic!` macro if the provided expression cannot be
/// evaluated to `true` at runtime.
///
-/// Unlike `assert!`, `debug_assert!` statements can be disabled by passing
-/// `--cfg ndebug` to the compiler. This makes `debug_assert!` useful for
-/// checks that are too expensive to be present in a release build but may be
-/// helpful during development.
+/// Unlike `assert!`, `debug_assert!` statements are only enabled in
+/// non-optimized builds by default. An optimized build will omit all
+/// `debug_assert!` statements unless `-C debug-assertions` is passed to the
+/// compiler. This makes `debug_assert!` useful for checks that are too
+/// expensive to be present in a release build but may be helpful during
+/// development.
///
/// # Example
///
#[macro_export]
#[stable(feature = "rust1", since = "1.0.0")]
macro_rules! debug_assert {
- ($($arg:tt)*) => (if cfg!(not(ndebug)) { assert!($($arg)*); })
+ ($($arg:tt)*) => (if cfg!(debug_assertions) { assert!($($arg)*); })
}
/// Asserts that two expressions are equal to each other, testing equality in
///
/// On panic, this macro will print the values of the expressions.
///
-/// Unlike `assert_eq!`, `debug_assert_eq!` statements can be disabled by
-/// passing `--cfg ndebug` to the compiler. This makes `debug_assert_eq!`
-/// useful for checks that are too expensive to be present in a release build
-/// but may be helpful during development.
+/// Unlike `assert_eq!`, `debug_assert_eq!` statements are only enabled in
+/// non-optimized builds by default. An optimized build will omit all
+/// `debug_assert_eq!` statements unless `-C debug-assertions` is passed to the
+/// compiler. This makes `debug_assert_eq!` useful for checks that are too
+/// expensive to be present in a release build but may be helpful during
+/// development.
///
/// # Example
///
/// ```
#[macro_export]
macro_rules! debug_assert_eq {
- ($($arg:tt)*) => (if cfg!(not(ndebug)) { assert_eq!($($arg)*); })
+ ($($arg:tt)*) => (if cfg!(debug_assertions) { assert_eq!($($arg)*); })
}
/// Short circuiting evaluation on Err
///
/// ```rust
/// fn divide_by_three(x: u32) -> u32 { // one of the poorest implementations of x/3
-/// for i in std::iter::count(0_u32, 1) {
+/// for i in std::iter::count(0, 1) {
/// if 3*i < i { panic!("u32 overflow"); }
/// if x < 3*i { return i-1; }
/// }
macro_rules! impl_from_primitive {
($T:ty, $to_ty:ident) => (
+ #[allow(deprecated)]
impl FromPrimitive for $T {
#[inline] fn from_int(n: int) -> Option<$T> { n.$to_ty() }
#[inline] fn from_i8(n: i8) -> Option<$T> { n.$to_ty() }
($T:ty, $conv:ident) => (
impl NumCast for $T {
#[inline]
+ #[allow(deprecated)]
fn from<N: ToPrimitive>(n: N) -> Option<$T> {
// `$conv` could be generated using `concat_idents!`, but that
// macro seems to be broken at the moment
#[stable(feature = "rust1", since = "1.0.0")]
pub trait Index<Idx: ?Sized> {
/// The returned type after indexing
+ #[stable(feature = "rust1", since = "1.0.0")]
type Output: ?Sized;
/// The method for the indexing (`Foo[Bar]`) operation
/// # Example
///
/// ```
- /// let k = 10i32;
+ /// let k = 10;
/// assert_eq!(Some(4).unwrap_or_else(|| 2 * k), 4);
/// assert_eq!(None.unwrap_or_else(|| 2 * k), 20);
/// ```
}
/// Mask of the value bits of a continuation byte
-const CONT_MASK: u8 = 0b0011_1111u8;
+const CONT_MASK: u8 = 0b0011_1111;
/// Value of the tag bits (tag mask is !CONT_MASK) of a continuation byte
-const TAG_CONT_U8: u8 = 0b1000_0000u8;
+const TAG_CONT_U8: u8 = 0b1000_0000;
/*
Section: Trait implementations
if index == self.len() { return true; }
match self.as_bytes().get(index) {
None => false,
- Some(&b) => b < 128u8 || b >= 192u8,
+ Some(&b) => b < 128 || b >= 192,
}
}
#[inline]
#[unstable(feature = "core")]
pub fn char_range_at_raw(bytes: &[u8], i: usize) -> (u32, usize) {
- if bytes[i] < 128u8 {
+ if bytes[i] < 128 {
return (bytes[i] as u32, i + 1);
}
#[test]
fn test_encode_utf8() {
fn check(input: char, expect: &[u8]) {
- let mut buf = [0u8; 4];
+ let mut buf = [0; 4];
let n = input.encode_utf8(&mut buf).unwrap_or(0);
assert_eq!(&buf[..n], expect);
}
#[test]
fn test_encode_utf16() {
fn check(input: char, expect: &[u16]) {
- let mut buf = [0u16; 2];
+ let mut buf = [0; 2];
let n = input.encode_utf16(&mut buf).unwrap_or(0);
assert_eq!(&buf[..n], expect);
}
// FIXME (#18283) Enable test
//let s: Box<str> = box "a";
//assert_eq!(hasher.hash(& s), 97 + 0xFF);
- let cs: &[u8] = &[1u8, 2u8, 3u8];
+ let cs: &[u8] = &[1, 2, 3];
assert_eq!(hash(& cs), 9);
// FIXME (#22405): Replace `Box::new` with `box` here when/if possible.
- let cs: Box<[u8]> = Box::new([1u8, 2u8, 3u8]);
+ let cs: Box<[u8]> = Box::new([1, 2, 3]);
assert_eq!(hash(& cs), 9);
// FIXME (#18248) Add tests for hashing Rc<str> and Rc<[T]>
impl Hasher for CustomHasher {
fn finish(&self) -> u64 { self.output }
- fn write(&mut self, data: &[u8]) { panic!() }
+ fn write(&mut self, _: &[u8]) { panic!() }
fn write_u64(&mut self, data: u64) { self.output = data; }
}
[ 0x72, 0x45, 0x06, 0xeb, 0x4c, 0x32, 0x8a, 0x95, ]
];
- let k0 = 0x_07_06_05_04_03_02_01_00_u64;
- let k1 = 0x_0f_0e_0d_0c_0b_0a_09_08_u64;
+ let k0 = 0x_07_06_05_04_03_02_01_00;
+ let k1 = 0x_0f_0e_0d_0c_0b_0a_09_08;
let mut buf = Vec::new();
let mut t = 0;
let mut state_inc = SipState::new_with_keys(k0, k1);
assert!(s != t && t != u);
assert!(hash(&s) != hash(&t) && hash(&s) != hash(&u));
- let v: (&[u8], &[u8], &[u8]) = (&[1u8], &[0u8, 0], &[0u8]);
- let w: (&[u8], &[u8], &[u8]) = (&[1u8, 0, 0, 0], &[], &[]);
+ let v: (&[u8], &[u8], &[u8]) = (&[1], &[0, 0], &[0]);
+ let w: (&[u8], &[u8], &[u8]) = (&[1, 0, 0, 0], &[], &[]);
assert!(v != w);
assert!(hash(&v) != hash(&w));
assert_eq!(range_step(0, 20, 5).collect::<Vec<int>>(), [0, 5, 10, 15]);
assert_eq!(range_step(20, 0, -5).collect::<Vec<int>>(), [20, 15, 10, 5]);
assert_eq!(range_step(20, 0, -6).collect::<Vec<int>>(), [20, 14, 8, 2]);
- assert_eq!(range_step(200u8, 255, 50).collect::<Vec<u8>>(), [200u8, 250]);
- assert_eq!(range_step(200i, -5, 1).collect::<Vec<int>>(), []);
- assert_eq!(range_step(200i, 200, 1).collect::<Vec<int>>(), []);
+ assert_eq!(range_step(200, 255, 50).collect::<Vec<u8>>(), [200, 250]);
+ assert_eq!(range_step(200, -5, 1).collect::<Vec<int>>(), []);
+ assert_eq!(range_step(200, 200, 1).collect::<Vec<int>>(), []);
}
#[test]
assert_eq!(range_step_inclusive(0, 20, 5).collect::<Vec<int>>(), [0, 5, 10, 15, 20]);
assert_eq!(range_step_inclusive(20, 0, -5).collect::<Vec<int>>(), [20, 15, 10, 5, 0]);
assert_eq!(range_step_inclusive(20, 0, -6).collect::<Vec<int>>(), [20, 14, 8, 2]);
- assert_eq!(range_step_inclusive(200u8, 255, 50).collect::<Vec<u8>>(), [200u8, 250]);
+ assert_eq!(range_step_inclusive(200, 255, 50).collect::<Vec<u8>>(), [200, 250]);
assert_eq!(range_step_inclusive(200, -5, 1).collect::<Vec<int>>(), []);
assert_eq!(range_step_inclusive(200, 200, 1).collect::<Vec<int>>(), [200]);
}
}
unsafe {
- assert_eq!([76u8], transmute::<_, Vec<u8>>("L".to_string()));
+ assert_eq!([76], transmute::<_, Vec<u8>>("L".to_string()));
}
}
#[test]
fn test_int_from_str_overflow() {
- let mut i8_val: i8 = 127_i8;
+ let mut i8_val: i8 = 127;
assert_eq!("127".parse::<i8>().ok(), Some(i8_val));
assert_eq!("128".parse::<i8>().ok(), None);
assert_eq!("-128".parse::<i8>().ok(), Some(i8_val));
assert_eq!("-129".parse::<i8>().ok(), None);
- let mut i16_val: i16 = 32_767_i16;
+ let mut i16_val: i16 = 32_767;
assert_eq!("32767".parse::<i16>().ok(), Some(i16_val));
assert_eq!("32768".parse::<i16>().ok(), None);
assert_eq!("-32768".parse::<i16>().ok(), Some(i16_val));
assert_eq!("-32769".parse::<i16>().ok(), None);
- let mut i32_val: i32 = 2_147_483_647_i32;
+ let mut i32_val: i32 = 2_147_483_647;
assert_eq!("2147483647".parse::<i32>().ok(), Some(i32_val));
assert_eq!("2147483648".parse::<i32>().ok(), None);
assert_eq!("-2147483648".parse::<i32>().ok(), Some(i32_val));
assert_eq!("-2147483649".parse::<i32>().ok(), None);
- let mut i64_val: i64 = 9_223_372_036_854_775_807_i64;
+ let mut i64_val: i64 = 9_223_372_036_854_775_807;
assert_eq!("9223372036854775807".parse::<i64>().ok(), Some(i64_val));
assert_eq!("9223372036854775808".parse::<i64>().ok(), None);
fn test_ptr_subtraction() {
unsafe {
let xs = vec![0,1,2,3,4,5,6,7,8,9];
- let mut idx = 9i8;
+ let mut idx = 9;
let ptr = xs.as_ptr();
- while idx >= 0i8 {
+ while idx >= 0 {
assert_eq!(*(ptr.offset(idx as int)), idx as int);
- idx = idx - 1i8;
+ idx = idx - 1;
}
let mut xs_mut = xs;
mod pattern {
use std::str::Pattern;
- use std::str::{Searcher, ReverseSearcher, DoubleEndedSearcher};
+ use std::str::{Searcher, ReverseSearcher};
use std::str::SearchStep::{self, Match, Reject, Done};
macro_rules! make_test {
($name:ident, $p:expr, $h:expr, [$($e:expr,)*]) => {
mod $name {
- use std::str::Pattern;
- use std::str::{Searcher, ReverseSearcher, DoubleEndedSearcher};
- use std::str::SearchStep::{self, Match, Reject, Done};
+ use std::str::SearchStep::{Match, Reject};
use super::{cmp_search_to_vec};
#[test]
fn fwd() {
#![feature(int_uint)]
#![feature(staged_api)]
#![feature(str_words)]
+#![feature(core)]
#![cfg_attr(test, feature(rustc_private))]
#[cfg(test)] #[macro_use] extern crate log;
#![crate_name = "libc"]
#![crate_type = "rlib"]
-#![cfg_attr(not(feature = "cargo-build"),
- unstable(feature = "libc"))]
-#![cfg_attr(not(feature = "cargo-build"), feature(staged_api))]
+#![cfg_attr(not(feature = "cargo-build"), unstable(feature = "libc"))]
+#![cfg_attr(not(feature = "cargo-build"), feature(staged_api, core, no_std))]
#![cfg_attr(not(feature = "cargo-build"), staged_api)]
-#![cfg_attr(not(feature = "cargo-build"), feature(core))]
-#![feature(no_std)]
-#![no_std]
+#![cfg_attr(not(feature = "cargo-build"), no_std)]
#![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "http://www.rust-lang.org/favicon.ico",
html_root_url = "http://doc.rust-lang.org/nightly/",
target_arch = "mips",
target_arch = "mipsel",
target_arch = "powerpc",
- target_arch = "le32"))]
+ target_arch = "le32",
+ all(target_arch = "arm", not(target_os = "android"))))]
pub mod posix88 {
pub type off_t = i32;
pub type dev_t = u64;
pub type mode_t = u32;
pub type ssize_t = i32;
}
- #[cfg(target_arch = "arm")]
+ #[cfg(all(target_arch = "arm", target_os = "android"))]
pub mod posix88 {
pub type off_t = i32;
pub type dev_t = u32;
}
#[cfg(any(target_arch = "x86",
target_arch = "le32",
- target_arch = "powerpc"))]
+ target_arch = "powerpc",
+ all(target_arch = "arm", not(target_os = "android"))))]
pub mod posix01 {
use types::os::arch::c95::{c_short, c_long, time_t};
use types::os::arch::posix88::{dev_t, gid_t, ino_t};
pub __size: [u32; 9]
}
}
- #[cfg(target_arch = "arm")]
+ #[cfg(all(target_arch = "arm", target_os = "android"))]
pub mod posix01 {
use types::os::arch::c95::{c_uchar, c_uint, c_ulong, time_t};
use types::os::arch::c99::{c_longlong, c_ulonglong};
pub const _IOFBF : c_int = 0;
pub const _IONBF : c_int = 4;
pub const _IOLBF : c_int = 64;
- pub const BUFSIZ : c_uint = 512_u32;
- pub const FOPEN_MAX : c_uint = 20_u32;
- pub const FILENAME_MAX : c_uint = 260_u32;
- pub const L_tmpnam : c_uint = 16_u32;
- pub const TMP_MAX : c_uint = 32767_u32;
+ pub const BUFSIZ : c_uint = 512;
+ pub const FOPEN_MAX : c_uint = 20;
+ pub const FILENAME_MAX : c_uint = 260;
+ pub const L_tmpnam : c_uint = 16;
+ pub const TMP_MAX : c_uint = 32767;
pub const WSAEINTR: c_int = 10004;
pub const WSAEBADF: c_int = 10009;
pub const _IOFBF : c_int = 0;
pub const _IONBF : c_int = 2;
pub const _IOLBF : c_int = 1;
- pub const BUFSIZ : c_uint = 8192_u32;
- pub const FOPEN_MAX : c_uint = 16_u32;
- pub const FILENAME_MAX : c_uint = 4096_u32;
- pub const L_tmpnam : c_uint = 20_u32;
- pub const TMP_MAX : c_uint = 238328_u32;
+ pub const BUFSIZ : c_uint = 8192;
+ pub const FOPEN_MAX : c_uint = 16;
+ pub const FILENAME_MAX : c_uint = 4096;
+ pub const L_tmpnam : c_uint = 20;
+ pub const TMP_MAX : c_uint = 238328;
}
pub mod c99 {
}
pub const _IOFBF : c_int = 0;
pub const _IONBF : c_int = 2;
pub const _IOLBF : c_int = 1;
- pub const BUFSIZ : c_uint = 1024_u32;
- pub const FOPEN_MAX : c_uint = 20_u32;
- pub const FILENAME_MAX : c_uint = 1024_u32;
- pub const L_tmpnam : c_uint = 1024_u32;
- pub const TMP_MAX : c_uint = 308915776_u32;
+ pub const BUFSIZ : c_uint = 1024;
+ pub const FOPEN_MAX : c_uint = 20;
+ pub const FILENAME_MAX : c_uint = 1024;
+ pub const L_tmpnam : c_uint = 1024;
+ pub const TMP_MAX : c_uint = 308915776;
}
pub mod c99 {
}
pub const _IOFBF : c_int = 0;
pub const _IONBF : c_int = 2;
pub const _IOLBF : c_int = 1;
- pub const BUFSIZ : c_uint = 1024_u32;
- pub const FOPEN_MAX : c_uint = 20_u32;
- pub const FILENAME_MAX : c_uint = 1024_u32;
- pub const L_tmpnam : c_uint = 1024_u32;
- pub const TMP_MAX : c_uint = 308915776_u32;
+ pub const BUFSIZ : c_uint = 1024;
+ pub const FOPEN_MAX : c_uint = 20;
+ pub const FILENAME_MAX : c_uint = 1024;
+ pub const L_tmpnam : c_uint = 1024;
+ pub const TMP_MAX : c_uint = 308915776;
}
pub mod c99 {
}
pub const _IOFBF : c_int = 0;
pub const _IONBF : c_int = 2;
pub const _IOLBF : c_int = 1;
- pub const BUFSIZ : c_uint = 1024_u32;
- pub const FOPEN_MAX : c_uint = 20_u32;
- pub const FILENAME_MAX : c_uint = 1024_u32;
- pub const L_tmpnam : c_uint = 1024_u32;
- pub const TMP_MAX : c_uint = 308915776_u32;
+ pub const BUFSIZ : c_uint = 1024;
+ pub const FOPEN_MAX : c_uint = 20;
+ pub const FILENAME_MAX : c_uint = 1024;
+ pub const L_tmpnam : c_uint = 1024;
+ pub const TMP_MAX : c_uint = 308915776;
}
pub mod c99 {
}
use types::os::arch::c95::{c_char, c_int};
use types::os::arch::posix88::mode_t;
+ mod open_shim {
+ extern {
+ #[cfg(any(target_os = "macos",
+ target_os = "ios"))]
+ pub fn open(path: *const ::c_char, oflag: ::c_int, ...)
+ -> ::c_int;
+
+ #[cfg(not(any(target_os = "macos",
+ target_os = "ios")))]
+ pub fn open(path: *const ::c_char, oflag: ::c_int, mode: ::mode_t)
+ -> ::c_int;
+ }
+ }
+
+ #[cfg(any(target_os = "macos",
+ target_os = "ios"))]
+ #[inline]
+ pub unsafe extern fn open(path: *const c_char, oflag: c_int, mode: mode_t) -> c_int {
+ use types::os::arch::c95::c_uint;
+ open_shim::open(path, oflag, mode as c_uint)
+ }
+
+ #[cfg(not(any(target_os = "macos",
+ target_os = "ios")))]
+ #[inline]
+ pub unsafe extern fn open(path: *const c_char, oflag: c_int, mode: mode_t) -> c_int {
+ open_shim::open(path, oflag, mode)
+ }
+
extern {
- pub fn open(path: *const c_char, oflag: c_int, mode: mode_t)
- -> c_int;
pub fn creat(path: *const c_char, mode: mode_t) -> c_int;
pub fn fcntl(fd: c_int, cmd: c_int, ...) -> c_int;
}
/// ```
#[macro_export]
macro_rules! debug {
- ($($arg:tt)*) => (if cfg!(not(ndebug)) { log!(::log::DEBUG, $($arg)*) })
+ ($($arg:tt)*) => (if cfg!(debug_assertions) { log!(::log::DEBUG, $($arg)*) })
}
/// A macro to test whether a log level is enabled for the current module.
macro_rules! log_enabled {
($lvl:expr) => ({
let lvl = $lvl;
- (lvl != ::log::DEBUG || cfg!(not(ndebug))) &&
+ (lvl != ::log::DEBUG || cfg!(debug_assertions)) &&
lvl <= ::log::log_level() &&
::log::mod_enabled(lvl, module_path!())
})
fn reseed(&mut self, seed: &'a [u32]) {
// reset state
- self.init(&[0u32; KEY_WORDS]);
+ self.init(&[0; KEY_WORDS]);
// set key in place
let key = &mut self.state[4 .. 4+KEY_WORDS];
for (k, s) in key.iter_mut().zip(seed.iter()) {
fn test_rng_true_values() {
// Test vectors 1 and 2 from
// http://tools.ietf.org/html/draft-nir-cfrg-chacha20-poly1305-04
- let seed : &[_] = &[0u32; 8];
+ let seed : &[_] = &[0; 8];
let mut ra: ChaChaRng = SeedableRng::from_seed(seed);
let v = (0..16).map(|_| ra.next_u32()).collect::<Vec<_>>();
#[test]
fn test_rng_clone() {
- let seed : &[_] = &[0u32; 8];
+ let seed : &[_] = &[0; 8];
let mut rng: ChaChaRng = SeedableRng::from_seed(seed);
let mut clone = rng.clone();
for _ in 0..16 {
///
/// This gives a uniform distribution (assuming the RNG used to sample
/// it is itself uniform & the `SampleRange` implementation for the
-/// given type is correct), even for edge cases like `low = 0u8`,
-/// `high = 170u8`, for which a naive modulo operation would return
+/// given type is correct), even for edge cases like `low = 0`,
+/// `high = 170`, for which a naive modulo operation would return
/// numbers less than 85 with double the probability to those greater
/// than 85.
///
fn reseed(&mut self, seed: &'a [u32]) {
// make the seed into [seed[0], seed[1], ..., seed[seed.len()
// - 1], 0, 0, ...], to fill rng.rsl.
- let seed_iter = seed.iter().cloned().chain(repeat(0u32));
+ let seed_iter = seed.iter().cloned().chain(repeat(0));
for (rsl_elem, seed_elem) in self.rsl.iter_mut().zip(seed_iter) {
*rsl_elem = seed_elem;
fn reseed(&mut self, seed: &'a [u64]) {
// make the seed into [seed[0], seed[1], ..., seed[seed.len()
// - 1], 0, 0, ...], to fill rng.rsl.
- let seed_iter = seed.iter().cloned().chain(repeat(0u64));
+ let seed_iter = seed.iter().cloned().chain(repeat(0));
for (rsl_elem, seed_elem) in self.rsl.iter_mut().zip(seed_iter) {
*rsl_elem = seed_elem;
/// ```rust
/// use std::rand::{thread_rng, Rng};
///
- /// let mut v = [0u8; 13579];
+ /// let mut v = [0; 13579];
/// thread_rng().fill_bytes(&mut v);
/// println!("{:?}", v.as_slice());
/// ```
#[cfg(test)]
mod tests {
- use std::prelude::v1::*;
use std::rand::{Rng, thread_rng, Open01, Closed01};
struct ConstantRng(u64);
const FILL_BYTES_V_LEN: uint = 13579;
#[test]
fn test_rng_fill_bytes() {
- let mut v = repeat(0u8).take(FILL_BYTES_V_LEN).collect::<Vec<_>>();
+ let mut v = repeat(0).take(FILL_BYTES_V_LEN).collect::<Vec<_>>();
::test::rng().fill_bytes(&mut v);
// Sanity test: if we've gotten here, `fill_bytes` has not infinitely
fn test_seekable_mem_writer() {
let mut writer = SeekableMemWriter::new();
assert_eq!(writer.tell(), Ok(0));
- writer.write(&[0]).unwrap();
+ writer.write_all(&[0]).unwrap();
assert_eq!(writer.tell(), Ok(1));
- writer.write(&[1, 2, 3]).unwrap();
- writer.write(&[4, 5, 6, 7]).unwrap();
+ writer.write_all(&[1, 2, 3]).unwrap();
+ writer.write_all(&[4, 5, 6, 7]).unwrap();
assert_eq!(writer.tell(), Ok(8));
let b: &[_] = &[0, 1, 2, 3, 4, 5, 6, 7];
assert_eq!(writer.get_ref(), b);
writer.seek(0, old_io::SeekSet).unwrap();
assert_eq!(writer.tell(), Ok(0));
- writer.write(&[3, 4]).unwrap();
+ writer.write_all(&[3, 4]).unwrap();
let b: &[_] = &[3, 4, 2, 3, 4, 5, 6, 7];
assert_eq!(writer.get_ref(), b);
writer.seek(1, old_io::SeekCur).unwrap();
- writer.write(&[0, 1]).unwrap();
+ writer.write_all(&[0, 1]).unwrap();
let b: &[_] = &[3, 4, 2, 0, 1, 5, 6, 7];
assert_eq!(writer.get_ref(), b);
writer.seek(-1, old_io::SeekEnd).unwrap();
- writer.write(&[1, 2]).unwrap();
+ writer.write_all(&[1, 2]).unwrap();
let b: &[_] = &[3, 4, 2, 0, 1, 5, 6, 1, 2];
assert_eq!(writer.get_ref(), b);
writer.seek(1, old_io::SeekEnd).unwrap();
- writer.write(&[1]).unwrap();
+ writer.write_all(&[1]).unwrap();
let b: &[_] = &[3, 4, 2, 0, 1, 5, 6, 1, 2, 0, 1];
assert_eq!(writer.get_ref(), b);
}
fn seek_past_end() {
let mut r = SeekableMemWriter::new();
r.seek(10, old_io::SeekSet).unwrap();
- assert!(r.write(&[3]).is_ok());
+ assert!(r.write_all(&[3]).is_ok());
}
#[test]
b.iter(|| {
let mut wr = SeekableMemWriter::new();
for _ in 0..times {
- wr.write(&src).unwrap();
+ wr.write_all(&src).unwrap();
}
let v = wr.unwrap();
#[inline(never)]
fn vuint_at_slow(data: &[u8], start: uint) -> DecodeResult<Res> {
let a = data[start];
- if a & 0x80u8 != 0u8 {
- return Ok(Res {val: (a & 0x7fu8) as uint, next: start + 1});
+ if a & 0x80 != 0 {
+ return Ok(Res {val: (a & 0x7f) as uint, next: start + 1});
}
- if a & 0x40u8 != 0u8 {
- return Ok(Res {val: ((a & 0x3fu8) as uint) << 8 |
+ if a & 0x40 != 0 {
+ return Ok(Res {val: ((a & 0x3f) as uint) << 8 |
(data[start + 1] as uint),
next: start + 2});
}
- if a & 0x20u8 != 0u8 {
- return Ok(Res {val: ((a & 0x1fu8) as uint) << 16 |
+ if a & 0x20 != 0 {
+ return Ok(Res {val: ((a & 0x1f) as uint) << 16 |
(data[start + 1] as uint) << 8 |
(data[start + 2] as uint),
next: start + 3});
}
- if a & 0x10u8 != 0u8 {
- return Ok(Res {val: ((a & 0x0fu8) as uint) << 24 |
+ if a & 0x10 != 0 {
+ return Ok(Res {val: ((a & 0x0f) as uint) << 24 |
(data[start + 1] as uint) << 16 |
(data[start + 2] as uint) << 8 |
(data[start + 3] as uint),
fn write_sized_vuint<W: Writer>(w: &mut W, n: uint, size: uint) -> EncodeResult {
match size {
- 1 => w.write_all(&[0x80u8 | (n as u8)]),
- 2 => w.write_all(&[0x40u8 | ((n >> 8) as u8), n as u8]),
- 3 => w.write_all(&[0x20u8 | ((n >> 16) as u8), (n >> 8) as u8,
+ 1 => w.write_all(&[0x80 | (n as u8)]),
+ 2 => w.write_all(&[0x40 | ((n >> 8) as u8), n as u8]),
+ 3 => w.write_all(&[0x20 | ((n >> 16) as u8), (n >> 8) as u8,
n as u8]),
- 4 => w.write_all(&[0x10u8 | ((n >> 24) as u8), (n >> 16) as u8,
+ 4 => w.write_all(&[0x10 | ((n >> 24) as u8), (n >> 16) as u8,
(n >> 8) as u8, n as u8]),
_ => Err(old_io::IoError {
kind: old_io::OtherIoError,
// Write a placeholder four-byte size.
self.size_positions.push(try!(self.writer.tell()) as uint);
- let zeroes: &[u8] = &[0u8, 0u8, 0u8, 0u8];
+ let zeroes: &[u8] = &[0, 0, 0, 0];
self.writer.write_all(zeroes)
}
#[bench]
pub fn vuint_at_A_aligned(b: &mut Bencher) {
- let data = (0i32..4*100).map(|i| {
+ let data = (0..4*100).map(|i| {
match i % 2 {
- 0 => 0x80u8,
+ 0 => 0x80,
_ => i as u8,
}
}).collect::<Vec<_>>();
#[bench]
pub fn vuint_at_A_unaligned(b: &mut Bencher) {
- let data = (0i32..4*100+1).map(|i| {
+ let data = (0..4*100+1).map(|i| {
match i % 2 {
- 1 => 0x80u8,
+ 1 => 0x80,
_ => i as u8
}
}).collect::<Vec<_>>();
#[bench]
pub fn vuint_at_D_aligned(b: &mut Bencher) {
- let data = (0i32..4*100).map(|i| {
+ let data = (0..4*100).map(|i| {
match i % 4 {
- 0 => 0x10u8,
+ 0 => 0x10,
3 => i as u8,
- _ => 0u8
+ _ => 0
}
}).collect::<Vec<_>>();
let mut sum = 0;
#[bench]
pub fn vuint_at_D_unaligned(b: &mut Bencher) {
- let data = (0i32..4*100+1).map(|i| {
+ let data = (0..4*100+1).map(|i| {
match i % 4 {
- 1 => 0x10u8,
+ 1 => 0x10,
0 => i as u8,
- _ => 0u8
+ _ => 0
}
}).collect::<Vec<_>>();
let mut sum = 0;
#![feature(staged_api)]
#![feature(std_misc)]
#![feature(os)]
+#![feature(path)]
+#![feature(fs)]
+#![feature(io)]
#![cfg_attr(test, feature(test))]
extern crate arena;
pub const tag_item_trait_item: uint = 0x39;
pub const tag_item_trait_ref: uint = 0x3a;
-pub const tag_item_super_trait_ref: uint = 0x3b;
// discriminator value for variants
pub const tag_disr_val: uint = 0x3c;
pub const tag_attribute_is_sugared_doc: uint = 0x8c;
-pub const tag_trait_def_bounds: uint = 0x8d;
-
pub const tag_items_data_region: uint = 0x8e;
pub const tag_region_param_def: uint = 0x8f;
pub const tag_macro_def_body: uint = 0x9f;
pub const tag_paren_sugar: uint = 0xa0;
+
+pub const tag_codemap: uint = 0xa1;
+pub const tag_codemap_filemap: uint = 0xa2;
+
+pub const tag_item_super_predicates: uint = 0xa3;
use metadata::loader;
use metadata::loader::CratePaths;
+use std::path::{Path, PathBuf};
use std::rc::Rc;
use syntax::ast;
use syntax::abi;
use syntax::attr;
use syntax::attr::AttrMetaMethods;
-use syntax::codemap::{Span, mk_sp};
+use syntax::codemap::{self, Span, mk_sp, Pos};
use syntax::parse;
use syntax::parse::token::InternedString;
use syntax::parse::token;
// Extra info about a crate loaded for plugins or exported macros.
struct ExtensionCrate {
metadata: PMDSource,
- dylib: Option<Path>,
+ dylib: Option<PathBuf>,
target_only: bool,
}
// Maintain a reference to the top most crate.
let root = if root.is_some() { root } else { &crate_paths };
- let cnum_map = self.resolve_crate_deps(root, lib.metadata.as_slice(), span);
+ let loader::Library { dylib, rlib, metadata } = lib;
- let loader::Library{ dylib, rlib, metadata } = lib;
+ let cnum_map = self.resolve_crate_deps(root, metadata.as_slice(), span);
+ let codemap_import_info = import_codemap(self.sess.codemap(), &metadata);
let cmeta = Rc::new( cstore::crate_metadata {
name: name.to_string(),
data: metadata,
cnum_map: cnum_map,
cnum: cnum,
+ codemap_import_info: codemap_import_info,
span: span,
});
// overridden in plugin/load.rs
export: false,
use_locally: false,
+ allow_internal_unstable: false,
body: body,
});
}
/// Look for a plugin registrar. Returns library path and symbol name.
- pub fn find_plugin_registrar(&mut self, span: Span, name: &str) -> Option<(Path, String)> {
+ pub fn find_plugin_registrar(&mut self, span: Span, name: &str)
+ -> Option<(PathBuf, String)> {
let ekrate = self.read_extension_crate(span, &CrateInfo {
name: name.to_string(),
ident: name.to_string(),
.map(|id| decoder::get_symbol(ekrate.metadata.as_slice(), id));
match (ekrate.dylib.as_ref(), registrar) {
- (Some(dylib), Some(reg)) => Some((dylib.clone(), reg)),
+ (Some(dylib), Some(reg)) => Some((dylib.to_path_buf(), reg)),
(None, Some(_)) => {
let message = format!("plugin `{}` only found in rlib format, \
but must be available in dylib format",
}
}
}
+
+/// Imports the codemap from an external crate into the codemap of the crate
+/// currently being compiled (the "local crate").
+///
+/// The import algorithm works analogous to how AST items are inlined from an
+/// external crate's metadata:
+/// For every FileMap in the external codemap an 'inline' copy is created in the
+/// local codemap. The correspondence relation between external and local
+/// FileMaps is recorded in the `ImportedFileMap` objects returned from this
+/// function. When an item from an external crate is later inlined into this
+/// crate, this correspondence information is used to translate the span
+/// information of the inlined item so that it refers to the correct positions in
+/// the local codemap (see `astencode::DecodeContext::tr_span()`).
+///
+/// The import algorithm in the function below will reuse FileMaps already
+/// existing in the local codemap. For example, even if the FileMap of some
+/// source file of libstd gets imported many times, there will only ever be
+/// one FileMap object for the corresponding file in the local codemap.
+///
+/// Note that imported FileMaps do not actually contain the source code of the
+/// file they represent, just information about length, line breaks, and
+/// multibyte characters. This information is enough to generate valid debuginfo
+/// for items inlined from other crates.
+fn import_codemap(local_codemap: &codemap::CodeMap,
+ metadata: &MetadataBlob)
+ -> Vec<cstore::ImportedFileMap> {
+ let external_codemap = decoder::get_imported_filemaps(metadata.as_slice());
+
+ let imported_filemaps = external_codemap.into_iter().map(|filemap_to_import| {
+ // Try to find an existing FileMap that can be reused for the filemap to
+ // be imported. A FileMap is reusable if it is exactly the same, just
+ // positioned at a different offset within the codemap.
+ let reusable_filemap = {
+ local_codemap.files
+ .borrow()
+ .iter()
+ .find(|fm| are_equal_modulo_startpos(&fm, &filemap_to_import))
+ .map(|rc| rc.clone())
+ };
+
+ match reusable_filemap {
+ Some(fm) => {
+ cstore::ImportedFileMap {
+ original_start_pos: filemap_to_import.start_pos,
+ original_end_pos: filemap_to_import.end_pos,
+ translated_filemap: fm
+ }
+ }
+ None => {
+ // We can't reuse an existing FileMap, so allocate a new one
+ // containing the information we need.
+ let codemap::FileMap {
+ name,
+ start_pos,
+ end_pos,
+ lines,
+ multibyte_chars,
+ ..
+ } = filemap_to_import;
+
+ let source_length = (end_pos - start_pos).to_usize();
+
+ // Translate line-start positions and multibyte character
+ // position into frame of reference local to file.
+ // `CodeMap::new_imported_filemap()` will then translate those
+ // coordinates to their new global frame of reference when the
+ // offset of the FileMap is known.
+ let lines = lines.into_inner().map_in_place(|pos| pos - start_pos);
+ let multibyte_chars = multibyte_chars
+ .into_inner()
+ .map_in_place(|mbc|
+ codemap::MultiByteChar {
+                            pos: mbc.pos - start_pos,
+ bytes: mbc.bytes
+ });
+
+ let local_version = local_codemap.new_imported_filemap(name,
+ source_length,
+ lines,
+ multibyte_chars);
+ cstore::ImportedFileMap {
+ original_start_pos: start_pos,
+ original_end_pos: end_pos,
+ translated_filemap: local_version
+ }
+ }
+ }
+ }).collect();
+
+ return imported_filemaps;
+
+ fn are_equal_modulo_startpos(fm1: &codemap::FileMap,
+ fm2: &codemap::FileMap)
+ -> bool {
+ if fm1.name != fm2.name {
+ return false;
+ }
+
+ let lines1 = fm1.lines.borrow();
+ let lines2 = fm2.lines.borrow();
+
+ if lines1.len() != lines2.len() {
+ return false;
+ }
+
+ for (&line1, &line2) in lines1.iter().zip(lines2.iter()) {
+ if (line1 - fm1.start_pos) != (line2 - fm2.start_pos) {
+ return false;
+ }
+ }
+
+ let multibytes1 = fm1.multibyte_chars.borrow();
+ let multibytes2 = fm2.multibyte_chars.borrow();
+
+ if multibytes1.len() != multibytes2.len() {
+ return false;
+ }
+
+ for (mb1, mb2) in multibytes1.iter().zip(multibytes2.iter()) {
+ if (mb1.bytes != mb2.bytes) ||
+ ((mb1.pos - fm1.start_pos) != (mb2.pos - fm2.start_pos)) {
+ return false;
+ }
+ }
+
+ true
+ }
+}
decoder::get_provided_trait_methods(cstore.intr.clone(), &*cdata, def.node, tcx)
}
-pub fn get_supertraits<'tcx>(tcx: &ty::ctxt<'tcx>,
- def: ast::DefId)
- -> Vec<Rc<ty::TraitRef<'tcx>>> {
- let cstore = &tcx.sess.cstore;
- let cdata = cstore.get_crate_data(def.krate);
- decoder::get_supertraits(&*cdata, def.node, tcx)
-}
-
pub fn get_type_name_if_impl(cstore: &cstore::CStore, def: ast::DefId)
-> Option<ast::Name> {
let cdata = cstore.get_crate_data(def.krate);
decoder::get_predicates(&*cdata, def.node, tcx)
}
+pub fn get_super_predicates<'tcx>(tcx: &ty::ctxt<'tcx>, def: ast::DefId)
+ -> ty::GenericPredicates<'tcx>
+{
+ let cstore = &tcx.sess.cstore;
+ let cdata = cstore.get_crate_data(def.krate);
+ decoder::get_super_predicates(&*cdata, def.node, tcx)
+}
+
pub fn get_field_type<'tcx>(tcx: &ty::ctxt<'tcx>, class_id: ast::DefId,
def: ast::DefId) -> ty::TypeScheme<'tcx> {
let cstore = &tcx.sess.cstore;
use std::cell::RefCell;
use std::rc::Rc;
+use std::path::PathBuf;
use flate::Bytes;
use syntax::ast;
-use syntax::codemap::Span;
+use syntax::codemap;
use syntax::parse::token::IdentInterner;
// A map from external crate numbers (as decoded from some crate file) to
MetadataArchive(loader::ArchiveMetadata),
}
+/// Holds information about a codemap::FileMap imported from another crate.
+/// See creader::import_codemap() for more information.
+pub struct ImportedFileMap {
+ /// This FileMap's byte-offset within the codemap of its original crate
+ pub original_start_pos: codemap::BytePos,
+ /// The end of this FileMap within the codemap of its original crate
+ pub original_end_pos: codemap::BytePos,
+ /// The imported FileMap's representation within the local codemap
+ pub translated_filemap: Rc<codemap::FileMap>
+}
+
pub struct crate_metadata {
pub name: String,
pub data: MetadataBlob,
pub cnum_map: cnum_map,
pub cnum: ast::CrateNum,
- pub span: Span,
+ pub codemap_import_info: Vec<ImportedFileMap>,
+ pub span: codemap::Span,
}
#[derive(Copy, Debug, PartialEq, Clone)]
// must be non-None.
#[derive(PartialEq, Clone)]
pub struct CrateSource {
- pub dylib: Option<(Path, PathKind)>,
- pub rlib: Option<(Path, PathKind)>,
+ pub dylib: Option<(PathBuf, PathKind)>,
+ pub rlib: Option<(PathBuf, PathKind)>,
pub cnum: ast::CrateNum,
}
// topological sort of all crates putting the leaves at the right-most
// positions.
pub fn get_used_crates(&self, prefer: LinkagePreference)
- -> Vec<(ast::CrateNum, Option<Path>)> {
+ -> Vec<(ast::CrateNum, Option<PathBuf>)> {
let mut ordering = Vec::new();
fn visit(cstore: &CStore, cnum: ast::CrateNum,
ordering: &mut Vec<ast::CrateNum>) {
use metadata::csearch;
use metadata::cstore;
use metadata::tydecode::{parse_ty_data, parse_region_data, parse_def_id,
- parse_type_param_def_data, parse_bounds_data,
- parse_bare_fn_ty_data, parse_trait_ref_data,
- parse_predicate_data};
+ parse_type_param_def_data, parse_bare_fn_ty_data,
+ parse_trait_ref_data, parse_predicate_data};
use middle::def;
use middle::lang_items;
use middle::subst;
use std::collections::HashMap;
use std::hash::{self, Hash, SipHasher};
-use std::num::FromPrimitive;
-use std::num::Int;
-use std::old_io;
+use std::io::prelude::*;
+use std::io;
+use std::num::{FromPrimitive, Int};
use std::rc::Rc;
use std::slice::bytes;
use std::str;
doc_trait_ref(tp, tcx, cdata)
}
-fn doc_bounds<'tcx>(doc: rbml::Doc, tcx: &ty::ctxt<'tcx>, cdata: Cmd)
- -> ty::ParamBounds<'tcx> {
- parse_bounds_data(doc.data, cdata.cnum, doc.start, tcx,
- |_, did| translate_def_id(cdata, did))
-}
-
-fn trait_def_bounds<'tcx>(doc: rbml::Doc, tcx: &ty::ctxt<'tcx>, cdata: Cmd)
- -> ty::ParamBounds<'tcx> {
- let d = reader::get_doc(doc, tag_trait_def_bounds);
- doc_bounds(d, tcx, cdata)
-}
-
fn enum_variant_ids(item: rbml::Doc, cdata: Cmd) -> Vec<ast::DefId> {
let mut ids: Vec<ast::DefId> = Vec::new();
let v = tag_items_data_item_variant;
{
let item_doc = lookup_item(item_id, cdata.data());
let generics = doc_generics(item_doc, tcx, cdata, tag_item_generics);
- let bounds = trait_def_bounds(item_doc, tcx, cdata);
let unsafety = parse_unsafety(item_doc);
let associated_type_names = parse_associated_type_names(item_doc);
let paren_sugar = parse_paren_sugar(item_doc);
paren_sugar: paren_sugar,
unsafety: unsafety,
generics: generics,
- bounds: bounds,
trait_ref: item_trait_ref(item_doc, tcx, cdata),
associated_type_names: associated_type_names,
}
doc_predicates(item_doc, tcx, cdata, tag_item_generics)
}
+pub fn get_super_predicates<'tcx>(cdata: Cmd,
+ item_id: ast::NodeId,
+ tcx: &ty::ctxt<'tcx>)
+ -> ty::GenericPredicates<'tcx>
+{
+ let item_doc = lookup_item(item_id, cdata.data());
+ doc_predicates(item_doc, tcx, cdata, tag_item_super_predicates)
+}
+
pub fn get_type<'tcx>(cdata: Cmd, id: ast::NodeId, tcx: &ty::ctxt<'tcx>)
-> ty::TypeScheme<'tcx>
{
return result;
}
-/// Returns the supertraits of the given trait.
-pub fn get_supertraits<'tcx>(cdata: Cmd, id: ast::NodeId, tcx: &ty::ctxt<'tcx>)
- -> Vec<Rc<ty::TraitRef<'tcx>>> {
- let mut results = Vec::new();
- let item_doc = lookup_item(id, cdata.data());
- reader::tagged_docs(item_doc, tag_item_super_trait_ref, |trait_doc| {
- // NB. Only reads the ones that *aren't* builtin-bounds. See also
- // get_trait_def() for collecting the builtin bounds.
- // FIXME(#8559): The builtin bounds shouldn't be encoded in the first place.
- let trait_ref = doc_trait_ref(trait_doc, tcx, cdata);
- if tcx.lang_items.to_builtin_kind(trait_ref.def_id).is_none() {
- results.push(trait_ref);
- }
- true
- });
- return results;
-}
-
pub fn get_type_name_if_impl(cdata: Cmd,
node_id: ast::NodeId) -> Option<ast::Name> {
let item = lookup_item(node_id, cdata.data());
}
fn list_crate_attributes(md: rbml::Doc, hash: &Svh,
- out: &mut old_io::Writer) -> old_io::IoResult<()> {
+ out: &mut io::Write) -> io::Result<()> {
try!(write!(out, "=Crate Attributes ({})=\n", *hash));
let r = get_attributes(md);
return deps;
}
-fn list_crate_deps(data: &[u8], out: &mut old_io::Writer) -> old_io::IoResult<()> {
+fn list_crate_deps(data: &[u8], out: &mut io::Write) -> io::Result<()> {
try!(write!(out, "=External Dependencies=\n"));
for dep in &get_crate_deps(data) {
try!(write!(out, "{} {}-{}\n", dep.cnum, dep.name, dep.hash));
maybe_get_crate_name(data).expect("no crate name in crate")
}
-pub fn list_crate_metadata(bytes: &[u8], out: &mut old_io::Writer) -> old_io::IoResult<()> {
+pub fn list_crate_metadata(bytes: &[u8], out: &mut io::Write) -> io::Result<()> {
let hash = get_crate_hash(bytes);
let md = rbml::Doc::new(bytes);
try!(list_crate_attributes(md, &hash, out));
}
}
-
pub fn is_default_trait<'tcx>(cdata: Cmd, id: ast::NodeId) -> bool {
let item_doc = lookup_item(id, cdata.data());
match item_family(item_doc) {
_ => false
}
}
+
+pub fn get_imported_filemaps(metadata: &[u8]) -> Vec<codemap::FileMap> {
+ let crate_doc = rbml::Doc::new(metadata);
+ let cm_doc = reader::get_doc(crate_doc, tag_codemap);
+
+ let mut filemaps = vec![];
+
+ reader::tagged_docs(cm_doc, tag_codemap_filemap, |filemap_doc| {
+ let mut decoder = reader::Decoder::new(filemap_doc);
+ let filemap: codemap::FileMap = Decodable::decode(&mut decoder).unwrap();
+ filemaps.push(filemap);
+ true
+ });
+
+ return filemaps;
+}
tyencode::enc_region(rbml_w, ty_str_ctxt, r);
}
-fn encode_bounds<'a, 'tcx>(rbml_w: &mut Encoder,
- ecx: &EncodeContext<'a, 'tcx>,
- bounds: &ty::ParamBounds<'tcx>,
- tag: uint) {
- rbml_w.start_tag(tag);
-
- let ty_str_ctxt = &tyencode::ctxt { diag: ecx.diag,
- ds: def_to_string,
- tcx: ecx.tcx,
- abbrevs: &ecx.type_abbrevs };
- tyencode::enc_bounds(rbml_w, ty_str_ctxt, bounds);
-
- rbml_w.end_tag();
-}
-
fn encode_type<'a, 'tcx>(ecx: &EncodeContext<'a, 'tcx>,
rbml_w: &mut Encoder,
typ: Ty<'tcx>) {
tcx: ecx.tcx,
abbrevs: &ecx.type_abbrevs
};
+
for param in generics.types.iter() {
rbml_w.start_tag(tag_type_param_def);
tyencode::enc_type_param_def(rbml_w, ty_str_ctxt, param);
rbml_w.end_tag();
}
+ encode_predicates_in_current_doc(rbml_w, ecx, predicates);
+
+ rbml_w.end_tag();
+}
+
+fn encode_predicates_in_current_doc<'a,'tcx>(rbml_w: &mut Encoder,
+ ecx: &EncodeContext<'a,'tcx>,
+ predicates: &ty::GenericPredicates<'tcx>)
+{
+ let ty_str_ctxt = &tyencode::ctxt {
+ diag: ecx.diag,
+ ds: def_to_string,
+ tcx: ecx.tcx,
+ abbrevs: &ecx.type_abbrevs
+ };
+
for (space, _, predicate) in predicates.predicates.iter_enumerated() {
rbml_w.start_tag(tag_predicate);
rbml_w.end_tag();
}
+}
+fn encode_predicates<'a,'tcx>(rbml_w: &mut Encoder,
+ ecx: &EncodeContext<'a,'tcx>,
+ predicates: &ty::GenericPredicates<'tcx>,
+ tag: uint)
+{
+ rbml_w.start_tag(tag);
+ encode_predicates_in_current_doc(rbml_w, ecx, predicates);
rbml_w.end_tag();
}
encode_paren_sugar(rbml_w, trait_def.paren_sugar);
encode_associated_type_names(rbml_w, &trait_def.associated_type_names);
encode_generics(rbml_w, ecx, &trait_def.generics, &trait_predicates, tag_item_generics);
+ encode_predicates(rbml_w, ecx, &ty::lookup_super_predicates(tcx, def_id),
+ tag_item_super_predicates);
encode_trait_ref(rbml_w, ecx, &*trait_def.trait_ref, tag_item_trait_ref);
encode_name(rbml_w, item.ident.name);
encode_attributes(rbml_w, &item.attrs);
}
encode_path(rbml_w, path.clone());
- encode_bounds(rbml_w, ecx, &trait_def.bounds, tag_trait_def_bounds);
-
// Encode the implementations of this trait.
encode_extension_implementations(ecx, rbml_w, def_id);
}
}
+fn encode_codemap(ecx: &EncodeContext, rbml_w: &mut Encoder) {
+ rbml_w.start_tag(tag_codemap);
+ let codemap = ecx.tcx.sess.codemap();
+
+ for filemap in &codemap.files.borrow()[..] {
+
+ if filemap.lines.borrow().len() == 0 || filemap.is_imported() {
+ // No need to export empty filemaps, as they can't contain spans
+ // that need translation.
+ // Also no need to re-export imported filemaps, as any downstream
+ // crate will import them from their original source.
+ continue;
+ }
+
+ rbml_w.start_tag(tag_codemap_filemap);
+ filemap.encode(rbml_w);
+ rbml_w.end_tag();
+ }
+
+ rbml_w.end_tag();
+}
+
/// Serialize the text of the exported macros
fn encode_macro_defs(rbml_w: &mut Encoder,
krate: &ast::Crate) {
lang_item_bytes: u64,
native_lib_bytes: u64,
plugin_registrar_fn_bytes: u64,
+ codemap_bytes: u64,
macro_defs_bytes: u64,
impl_bytes: u64,
misc_bytes: u64,
lang_item_bytes: 0,
native_lib_bytes: 0,
plugin_registrar_fn_bytes: 0,
+ codemap_bytes: 0,
macro_defs_bytes: 0,
impl_bytes: 0,
misc_bytes: 0,
encode_plugin_registrar_fn(&ecx, &mut rbml_w);
stats.plugin_registrar_fn_bytes = rbml_w.writer.tell().unwrap() - i;
+ // Encode codemap
+ i = rbml_w.writer.tell().unwrap();
+ encode_codemap(&ecx, &mut rbml_w);
+ stats.codemap_bytes = rbml_w.writer.tell().unwrap() - i;
+
// Encode macro definitions
i = rbml_w.writer.tell().unwrap();
encode_macro_defs(&mut rbml_w, krate);
println!(" lang item bytes: {}", stats.lang_item_bytes);
println!(" native bytes: {}", stats.native_lib_bytes);
println!("plugin registrar bytes: {}", stats.plugin_registrar_fn_bytes);
+ println!(" codemap bytes: {}", stats.codemap_bytes);
println!(" macro def bytes: {}", stats.macro_defs_bytes);
println!(" impl bytes: {}", stats.impl_bytes);
println!(" misc bytes: {}", stats.misc_bytes);
use std::collections::HashSet;
use std::env;
-use std::os;
-use std::old_io::fs::PathExtensions;
-use std::old_io::fs;
+use std::fs;
+use std::io::prelude::*;
+use std::path::{Path, PathBuf};
use util::fs as myfs;
use session::search_paths::{SearchPaths, PathKind};
FileMatches => found = true,
FileDoesntMatch => ()
}
- visited_dirs.insert(path.as_vec().to_vec());
+ visited_dirs.insert(path.to_path_buf());
}
debug!("filesearch: searching lib path");
let tlib_path = make_target_lib_path(self.sysroot,
self.triple);
- if !visited_dirs.contains(tlib_path.as_vec()) {
+ if !visited_dirs.contains(&tlib_path) {
match f(&tlib_path, PathKind::All) {
FileMatches => found = true,
FileDoesntMatch => ()
}
}
- visited_dirs.insert(tlib_path.as_vec().to_vec());
+ visited_dirs.insert(tlib_path);
// Try RUST_PATH
if !found {
let rustpath = rust_path();
let tlib_path = make_rustpkg_lib_path(
self.sysroot, path, self.triple);
debug!("is {} in visited_dirs? {}", tlib_path.display(),
- visited_dirs.contains(&tlib_path.as_vec().to_vec()));
+ visited_dirs.contains(&tlib_path));
- if !visited_dirs.contains(tlib_path.as_vec()) {
- visited_dirs.insert(tlib_path.as_vec().to_vec());
+ if !visited_dirs.contains(&tlib_path) {
+ visited_dirs.insert(tlib_path.clone());
// Don't keep searching the RUST_PATH if one match turns up --
// if we did, we'd get a "multiple matching crates" error
match f(&tlib_path, PathKind::All) {
}
}
- pub fn get_lib_path(&self) -> Path {
+ pub fn get_lib_path(&self) -> PathBuf {
make_target_lib_path(self.sysroot, self.triple)
}
{
self.for_each_lib_search_path(|lib_search_path, kind| {
debug!("searching {}", lib_search_path.display());
- match fs::readdir(lib_search_path) {
+ match fs::read_dir(lib_search_path) {
Ok(files) => {
+ let files = files.filter_map(|p| p.ok().map(|s| s.path()))
+ .collect::<Vec<_>>();
let mut rslt = FileDoesntMatch;
- fn is_rlib(p: & &Path) -> bool {
- p.extension_str() == Some("rlib")
+ fn is_rlib(p: &Path) -> bool {
+ p.extension().and_then(|s| s.to_str()) == Some("rlib")
}
// Reading metadata out of rlibs is faster, and if we find both
// an rlib and a dylib we only read one of the files of
}
// Returns a list of directories where target-specific dylibs might be located.
- pub fn get_dylib_search_paths(&self) -> Vec<Path> {
+ pub fn get_dylib_search_paths(&self) -> Vec<PathBuf> {
let mut paths = Vec::new();
self.for_each_lib_search_path(|lib_search_path, _| {
- paths.push(lib_search_path.clone());
+ paths.push(lib_search_path.to_path_buf());
FileDoesntMatch
});
paths
}
// Returns a list of directories where target-specific tool binaries are located.
- pub fn get_tools_search_paths(&self) -> Vec<Path> {
- let mut p = Path::new(self.sysroot);
- p.push(find_libdir(self.sysroot));
- p.push(rustlibdir());
- p.push(self.triple);
+ pub fn get_tools_search_paths(&self) -> Vec<PathBuf> {
+ let mut p = PathBuf::new(self.sysroot);
+ p.push(&find_libdir(self.sysroot));
+ p.push(&rustlibdir());
+ p.push(&self.triple);
p.push("bin");
vec![p]
}
}
-pub fn relative_target_lib_path(sysroot: &Path, target_triple: &str) -> Path {
- let mut p = Path::new(find_libdir(sysroot));
+pub fn relative_target_lib_path(sysroot: &Path, target_triple: &str) -> PathBuf {
+ let mut p = PathBuf::new(&find_libdir(sysroot));
assert!(p.is_relative());
- p.push(rustlibdir());
+ p.push(&rustlibdir());
p.push(target_triple);
p.push("lib");
p
}
fn make_target_lib_path(sysroot: &Path,
- target_triple: &str) -> Path {
+ target_triple: &str) -> PathBuf {
sysroot.join(&relative_target_lib_path(sysroot, target_triple))
}
fn make_rustpkg_lib_path(sysroot: &Path,
dir: &Path,
- triple: &str) -> Path {
- let mut p = dir.join(find_libdir(sysroot));
+ triple: &str) -> PathBuf {
+ let mut p = dir.join(&find_libdir(sysroot));
p.push(triple);
p
}
-pub fn get_or_default_sysroot() -> Path {
+pub fn get_or_default_sysroot() -> PathBuf {
// Follow symlinks. If the resolved path is relative, make it absolute.
- fn canonicalize(path: Option<Path>) -> Option<Path> {
- path.and_then(|path|
+ fn canonicalize(path: Option<PathBuf>) -> Option<PathBuf> {
+ path.and_then(|path| {
match myfs::realpath(&path) {
Ok(canon) => Some(canon),
Err(e) => panic!("failed to get realpath: {}", e),
- })
+ }
+ })
}
- match canonicalize(os::self_exe_name()) {
+ match canonicalize(env::current_exe().ok()) {
Some(mut p) => { p.pop(); p.pop(); p }
None => panic!("can't determine value for sysroot")
}
/// $HOME/.rust
/// DIR/.rust for any DIR that's the current working directory
/// or an ancestor of it
-pub fn rust_path() -> Vec<Path> {
- let mut env_rust_path: Vec<Path> = match get_rust_path() {
+pub fn rust_path() -> Vec<PathBuf> {
+ let mut env_rust_path: Vec<PathBuf> = match get_rust_path() {
Some(env_path) => {
let env_path_components =
env_path.split(PATH_ENTRY_SEPARATOR);
- env_path_components.map(|s| Path::new(s)).collect()
+ env_path_components.map(|s| PathBuf::new(s)).collect()
}
None => Vec::new()
};
- let mut cwd = os::getcwd().unwrap();
+ let cwd = env::current_dir().unwrap();
// now add in default entries
let cwd_dot_rust = cwd.join(".rust");
if !env_rust_path.contains(&cwd_dot_rust) {
if !env_rust_path.contains(&cwd) {
env_rust_path.push(cwd.clone());
}
- loop {
- if { let f = cwd.filename(); f.is_none() || f.unwrap() == b".." } {
- break
- }
- cwd.set_filename(".rust");
- if !env_rust_path.contains(&cwd) && cwd.exists() {
- env_rust_path.push(cwd.clone());
+ let mut cur = &*cwd;
+ while let Some(parent) = cur.parent() {
+ let candidate = parent.join(".rust");
+ if !env_rust_path.contains(&candidate) && candidate.exists() {
+ env_rust_path.push(candidate.clone());
}
- cwd.pop();
+ cur = parent;
}
- if let Some(h) = os::homedir() {
+ if let Some(h) = env::home_dir() {
let p = h.join(".rust");
if !env_rust_path.contains(&p) && p.exists() {
env_rust_path.push(p);
match option_env!("CFG_LIBDIR_RELATIVE") {
Some(libdir) if libdir != "lib" => return libdir.to_string(),
- _ => if sysroot.join(primary_libdir_name()).join(rustlibdir()).exists() {
+ _ => if sysroot.join(&primary_libdir_name()).join(&rustlibdir()).exists() {
return primary_libdir_name();
} else {
return secondary_libdir_name();
use syntax::codemap::Span;
use syntax::diagnostic::SpanHandler;
use util::fs;
+use util::common;
use rustc_back::target::Target;
-use std::ffi::CString;
use std::cmp;
use std::collections::HashMap;
-use std::old_io::fs::PathExtensions;
-use std::old_io;
+use std::io::prelude::*;
+use std::io;
+use std::path::{Path, PathBuf};
use std::ptr;
use std::slice;
use std::time::Duration;
use flate;
pub struct CrateMismatch {
- path: Path,
+ path: PathBuf,
got: String,
}
}
pub struct Library {
- pub dylib: Option<(Path, PathKind)>,
- pub rlib: Option<(Path, PathKind)>,
+ pub dylib: Option<(PathBuf, PathKind)>,
+ pub rlib: Option<(PathBuf, PathKind)>,
pub metadata: MetadataBlob,
}
pub struct CratePaths {
pub ident: String,
- pub dylib: Option<Path>,
- pub rlib: Option<Path>
+ pub dylib: Option<PathBuf>,
+ pub rlib: Option<PathBuf>
}
impl CratePaths {
- fn paths(&self) -> Vec<Path> {
+ fn paths(&self) -> Vec<PathBuf> {
match (&self.dylib, &self.rlib) {
(&None, &None) => vec!(),
(&Some(ref p), &None) |
}
}
if self.rejected_via_kind.len() > 0 {
- self.sess.span_help(self.span, "please recompile this crate using \
+ self.sess.fileline_help(self.span, "please recompile this crate using \
--crate-type lib");
let mismatches = self.rejected_via_kind.iter();
for (i, &CrateMismatch { ref path, .. }) in mismatches.enumerate() {
//
// The goal of this step is to look at as little metadata as possible.
self.filesearch.search(|path, kind| {
- let file = match path.filename_str() {
+ let file = match path.file_name().and_then(|s| s.to_str()) {
None => return FileDoesntMatch,
Some(file) => file,
};
if file.starts_with(&staticlib_prefix[..]) &&
file.ends_with(".a") {
staticlibs.push(CrateMismatch {
- path: path.clone(),
+ path: path.to_path_buf(),
got: "static".to_string()
});
}
// read the metadata from it if `*slot` is `None`. If the metadata couldn't
// be read, it is assumed that the file isn't a valid rust library (no
// errors are emitted).
- fn extract_one(&mut self, m: HashMap<Path, PathKind>, flavor: &str,
- slot: &mut Option<MetadataBlob>) -> Option<(Path, PathKind)> {
- let mut ret = None::<(Path, PathKind)>;
+ fn extract_one(&mut self, m: HashMap<PathBuf, PathKind>, flavor: &str,
+ slot: &mut Option<MetadataBlob>) -> Option<(PathBuf, PathKind)> {
+ let mut ret = None::<(PathBuf, PathKind)>;
let mut error = 0;
if slot.is_some() {
if triple != self.triple {
info!("Rejecting via crate triple: expected {} got {}", self.triple, triple);
self.rejected_via_triple.push(CrateMismatch {
- path: libpath.clone(),
+ path: libpath.to_path_buf(),
got: triple.to_string()
});
return false;
if *myhash != hash {
info!("Rejecting via hash: expected {} got {}", *myhash, hash);
self.rejected_via_hash.push(CrateMismatch {
- path: libpath.clone(),
+ path: libpath.to_path_buf(),
got: myhash.as_str().to_string()
});
false
let mut rlibs = HashMap::new();
let mut dylibs = HashMap::new();
{
- let locs = locs.iter().map(|l| Path::new(&l[..])).filter(|loc| {
+ let locs = locs.iter().map(|l| PathBuf::new(&l[..])).filter(|loc| {
if !loc.exists() {
sess.err(&format!("extern location for {} does not exist: {}",
self.crate_name, loc.display()));
return false;
}
- let file = match loc.filename_str() {
+ let file = match loc.file_name().and_then(|s| s.to_str()) {
Some(file) => file,
None => {
sess.err(&format!("extern location for {} is not a file: {}",
// Now that we have an iterator of good candidates, make sure
// there's at most one rlib and at most one dylib.
for loc in locs {
- if loc.filename_str().unwrap().ends_with(".rlib") {
+ if loc.file_name().unwrap().to_str().unwrap().ends_with(".rlib") {
rlibs.insert(fs::realpath(&loc).unwrap(),
PathKind::ExternFlag);
} else {
let dur = Duration::span(|| {
ret = Some(get_metadata_section_imp(is_osx, filename));
});
- info!("reading {} => {}ms", filename.filename_display(),
+ info!("reading {:?} => {}ms", filename.file_name().unwrap(),
dur.num_milliseconds());
return ret.unwrap();;
}
if !filename.exists() {
return Err(format!("no such file: '{}'", filename.display()));
}
- if filename.filename_str().unwrap().ends_with(".rlib") {
+ if filename.file_name().unwrap().to_str().unwrap().ends_with(".rlib") {
// Use ArchiveRO for speed here, it's backed by LLVM and uses mmap
// internally to read the file. We also avoid even using a memcpy by
// just keeping the archive along while the metadata is in use.
};
}
unsafe {
- let buf = CString::new(filename.as_vec()).unwrap();
+ let buf = common::path2cstr(filename);
let mb = llvm::LLVMRustCreateMemoryBufferWithContentsOfFile(buf.as_ptr());
if mb as int == 0 {
return Err(format!("error reading library: '{}'",
// A diagnostic function for dumping crate metadata to an output stream
pub fn list_file_metadata(is_osx: bool, path: &Path,
- out: &mut old_io::Writer) -> old_io::IoResult<()> {
+ out: &mut io::Write) -> io::Result<()> {
match get_metadata_section(is_osx, path) {
Ok(bytes) => decoder::list_crate_metadata(bytes.as_slice(), out),
Err(msg) => {
}
"plugin" => {
self.sess.span_err(attr.span, "#[plugin] on `extern crate` is deprecated");
- self.sess.span_help(attr.span, &format!("use a crate attribute instead, \
+ self.sess.fileline_help(attr.span, &format!("use a crate attribute instead, \
i.e. #![plugin({})]",
item.ident.as_str()));
}
Some(sel) => sel.contains_key(&name),
};
def.export = reexport.contains_key(&name);
+ def.allow_internal_unstable = attr::contains_name(&def.attrs,
+ "allow_internal_unstable");
+ debug!("load_macros: loaded: {:?}", def);
self.macros.push(def);
}
use std::old_io::Seek;
use std::num::FromPrimitive;
use std::rc::Rc;
+use std::cell::Cell;
use rbml::reader;
use rbml::writer::Encoder;
tcx: &'a ty::ctxt<'tcx>,
cdata: &'b cstore::crate_metadata,
from_id_range: ast_util::IdRange,
- to_id_range: ast_util::IdRange
+ to_id_range: ast_util::IdRange,
+ // Cache the last used filemap for translating spans as an optimization.
+ last_filemap_index: Cell<usize>,
}
trait tr {
}
}
+/// Decodes an item from its AST in the cdata's metadata and adds it to the
+/// ast-map.
pub fn decode_inlined_item<'tcx>(cdata: &cstore::crate_metadata,
tcx: &ty::ctxt<'tcx>,
path: Vec<ast_map::PathElem>,
cdata: cdata,
tcx: tcx,
from_id_range: from_id_range,
- to_id_range: to_id_range
+ to_id_range: to_id_range,
+ last_filemap_index: Cell::new(0)
};
let raw_ii = decode_ast(ast_doc);
let ii = ast_map::map_decoded_item(&dcx.tcx.map, path, raw_ii, dcx);
assert_eq!(did.krate, ast::LOCAL_CRATE);
ast::DefId { krate: ast::LOCAL_CRATE, node: self.tr_id(did.node) }
}
- pub fn tr_span(&self, _span: Span) -> Span {
- codemap::DUMMY_SP // FIXME (#1972): handle span properly
+
+ /// Translates a `Span` from an extern crate to the corresponding `Span`
+ /// within the local crate's codemap. `creader::import_codemap()` will
+ /// already have allocated any additionally needed FileMaps in the local
+ /// codemap as a side-effect of creating the crate_metadata's
+ /// `codemap_import_info`.
+ pub fn tr_span(&self, span: Span) -> Span {
+ let imported_filemaps = &self.cdata.codemap_import_info[..];
+
+ let filemap_index = {
+ // Optimize for the case that most spans within a translated item
+ // originate from the same filemap.
+ let last_filemap_index = self.last_filemap_index.get();
+
+ if span.lo >= imported_filemaps[last_filemap_index].original_start_pos &&
+ span.hi <= imported_filemaps[last_filemap_index].original_end_pos {
+ last_filemap_index
+ } else {
+ let mut a = 0;
+ let mut b = imported_filemaps.len();
+
+ while b - a > 1 {
+ let m = (a + b) / 2;
+ if imported_filemaps[m].original_start_pos > span.lo {
+ b = m;
+ } else {
+ a = m;
+ }
+ }
+
+ self.last_filemap_index.set(a);
+ a
+ }
+ };
+
+ let lo = (span.lo - imported_filemaps[filemap_index].original_start_pos) +
+ imported_filemaps[filemap_index].translated_filemap.start_pos;
+ let hi = (span.hi - imported_filemaps[filemap_index].original_start_pos) +
+ imported_filemaps[filemap_index].translated_filemap.start_pos;
+
+ codemap::mk_sp(lo, hi)
}
}
"pattern binding `{}` is named the same as one \
of the variants of the type `{}`",
&token::get_ident(ident.node), ty_to_string(cx.tcx, pat_ty));
- span_help!(cx.tcx.sess, p.span,
+ fileline_help!(cx.tcx.sess, p.span,
"if you meant to match on a variant, \
consider making the path in the pattern qualified: `{}::{}`",
ty_to_string(cx.tcx, pat_ty), &token::get_ident(ident.node));
ast::ExprBlock(ref block) => {
match block.expr {
Some(ref expr) => try!(eval_const_expr_partial(tcx, &**expr, ety)),
- None => const_int(0i64)
+ None => const_int(0)
}
}
ast::ExprTupField(ref base, index) => {
use middle::cfg;
use middle::cfg::CFGIndex;
use middle::ty;
-use std::old_io;
+use std::io;
use std::usize;
use std::iter::repeat;
use syntax::ast;
impl<'a, 'tcx, O:DataFlowOperator> pprust::PpAnn for DataFlowContext<'a, 'tcx, O> {
fn pre(&self,
ps: &mut pprust::State,
- node: pprust::AnnNode) -> old_io::IoResult<()> {
+ node: pprust::AnnNode) -> io::Result<()> {
let id = match node {
pprust::NodeIdent(_) | pprust::NodeName(_) => 0,
pprust::NodeExpr(expr) => expr.id,
debug!("Dataflow result for {}:", self.analysis_name);
debug!("{}", {
- self.pretty_print_to(box old_io::stderr(), blk).unwrap();
+ let mut v = Vec::new();
+ self.pretty_print_to(box &mut v, blk).unwrap();
+ println!("{}", String::from_utf8(v).unwrap());
""
});
}
- fn pretty_print_to(&self, wr: Box<old_io::Writer+'static>,
- blk: &ast::Block) -> old_io::IoResult<()> {
+ fn pretty_print_to<'b>(&self, wr: Box<io::Write + 'b>,
+ blk: &ast::Block) -> io::Result<()> {
let mut ps = pprust::rust_printer_annotated(wr, self);
try!(ps.cbox(pprust::indent_unit));
try!(ps.ibox(0));
// Does the required lifetime have a nice name we can print?
span_err!(self.tcx.sess, origin.span(), E0309,
"{} may not live long enough", labeled_user_string);
- self.tcx.sess.span_help(
+ self.tcx.sess.fileline_help(
origin.span(),
&format!(
"consider adding an explicit lifetime bound `{}: {}`...",
// Does the required lifetime have a nice name we can print?
span_err!(self.tcx.sess, origin.span(), E0310,
"{} may not live long enough", labeled_user_string);
- self.tcx.sess.span_help(
+ self.tcx.sess.fileline_help(
origin.span(),
&format!(
"consider adding an explicit lifetime bound `{}: 'static`...",
span_err!(self.tcx.sess, origin.span(), E0311,
"{} may not live long enough",
labeled_user_string);
- self.tcx.sess.span_help(
+ self.tcx.sess.fileline_help(
origin.span(),
&format!(
"consider adding an explicit lifetime bound for `{}`",
use std::borrow::Cow;
use std::collections::hash_map::Entry::Vacant;
-use std::old_io::{self, File};
use std::env;
+use std::fs::File;
+use std::io;
+use std::io::prelude::*;
use std::sync::atomic::{AtomicBool, Ordering, ATOMIC_BOOL_INIT};
use syntax::ast;
fn dump_region_constraints_to<'a, 'tcx:'a >(tcx: &'a ty::ctxt<'tcx>,
map: &ConstraintMap<'tcx>,
- path: &str) -> old_io::IoResult<()> {
+ path: &str) -> io::Result<()> {
debug!("dump_region_constraints map (len: {}) path: {}", map.len(), path);
let g = ConstraintGraph::new(tcx, format!("region_constraints"), map);
- let mut f = File::create(&Path::new(path));
debug!("dump_region_constraints calling render");
- dot::render(&g, &mut f)
+ let mut v = Vec::new();
+ dot::render(&g, &mut v).unwrap();
+ File::create(path).and_then(|mut f| f.write_all(&v))
}
/// Helper for discovering nodes to check for stability
pub fn check_expr(tcx: &ty::ctxt, e: &ast::Expr,
cb: &mut FnMut(ast::DefId, Span, &Option<Stability>)) {
- if is_internal(tcx, e.span) { return; }
-
let span;
let id = match e.node {
ast::ExprMethodCall(i, _, _) => {
fn maybe_do_stability_check(tcx: &ty::ctxt, id: ast::DefId, span: Span,
cb: &mut FnMut(ast::DefId, Span, &Option<Stability>)) {
if !is_staged_api(tcx, id) { return }
+ if is_internal(tcx, span) { return }
let ref stability = lookup(tcx, id);
cb(id, span, stability);
}
fn is_internal(tcx: &ty::ctxt, span: Span) -> bool {
- tcx.sess.codemap().span_is_internal(span)
+ tcx.sess.codemap().span_allows_unstable(span)
}
fn is_staged_api(tcx: &ty::ctxt, id: DefId) -> bool {
/// for the object type `Foo`.
#[derive(PartialEq,Eq,Clone)]
pub struct VtableObjectData<'tcx> {
+ /// the object type `Foo`.
pub object_ty: Ty<'tcx>,
+
+ /// `Foo` upcast to the obligation trait. This will be some supertrait of `Foo`.
+ pub upcast_trait_ref: ty::PolyTraitRef<'tcx>,
}
/// Creates predicate obligations from the generic bounds.
use middle::subst::{self, SelfSpace, TypeSpace};
use middle::traits;
-use middle::ty::{self, Ty};
+use middle::ty::{self, ToPolyTraitRef, Ty};
use std::rc::Rc;
use syntax::ast;
use util::ppaux::Repr;
{
let trait_def = ty::lookup_trait_def(tcx, trait_def_id);
let trait_ref = trait_def.trait_ref.clone();
- let predicates = ty::predicates_for_trait_ref(tcx, &ty::Binder(trait_ref));
+ let trait_ref = trait_ref.to_poly_trait_ref();
+ let predicates = ty::lookup_super_predicates(tcx, trait_def_id);
predicates
+ .predicates
.into_iter()
+ .map(|predicate| predicate.subst_supertrait(tcx, &trait_ref))
.any(|predicate| {
match predicate {
ty::Predicate::Trait(ref data) => {
poly_trait_ref.repr(self.tcx()));
// see whether the object trait can be upcast to the trait we are looking for
- let obligation_def_id = obligation.predicate.def_id();
- let upcast_trait_ref = match util::upcast(self.tcx(), poly_trait_ref, obligation_def_id) {
- Some(r) => r,
- None => { return; }
- };
-
- debug!("assemble_candidates_from_object_ty: upcast_trait_ref={}",
- upcast_trait_ref.repr(self.tcx()));
-
- // check whether the upcast version of the trait-ref matches what we are looking for
- if let Ok(()) = self.infcx.probe(|_| self.match_poly_trait_ref(obligation,
- upcast_trait_ref.clone())) {
- debug!("assemble_candidates_from_object_ty: matched, pushing candidate");
+ let upcast_trait_refs = self.upcast(poly_trait_ref, obligation);
+ if upcast_trait_refs.len() > 1 {
+ // can be upcast in many ways; need more type information
+ candidates.ambiguous = true;
+ } else if upcast_trait_refs.len() == 1 {
candidates.vec.push(ObjectCandidate);
}
}
let principal =
data.principal_trait_ref_with_self_ty(self.tcx(),
self.tcx().types.err);
+ let desired_def_id = obligation.predicate.def_id();
for tr in util::supertraits(self.tcx(), principal) {
- let td = ty::lookup_trait_def(self.tcx(), tr.def_id());
- if td.bounds.builtin_bounds.contains(&bound) {
+ if tr.def_id() == desired_def_id {
return Ok(If(Vec::new()))
}
}
}
};
- let obligation_def_id = obligation.predicate.def_id();
- let upcast_trait_ref = match util::upcast(self.tcx(),
- poly_trait_ref.clone(),
- obligation_def_id) {
- Some(r) => r,
- None => {
- self.tcx().sess.span_bug(obligation.cause.span,
- &format!("unable to upcast from {} to {}",
- poly_trait_ref.repr(self.tcx()),
- obligation_def_id.repr(self.tcx())));
- }
- };
+ // Upcast the object type to the obligation type. There must
+ // be exactly one applicable trait-reference; if this were not
+ // the case, we would have reported an ambiguity error rather
+ // than successfully selecting one of the candidates.
+ let upcast_trait_refs = self.upcast(poly_trait_ref.clone(), obligation);
+ assert_eq!(upcast_trait_refs.len(), 1);
+ let upcast_trait_ref = upcast_trait_refs.into_iter().next().unwrap();
- match self.match_poly_trait_ref(obligation, upcast_trait_ref) {
+ match self.match_poly_trait_ref(obligation, upcast_trait_ref.clone()) {
Ok(()) => { }
Err(()) => {
self.tcx().sess.span_bug(obligation.cause.span,
}
}
- VtableObjectData { object_ty: self_ty }
+ VtableObjectData { object_ty: self_ty,
+ upcast_trait_ref: upcast_trait_ref }
}
fn confirm_fn_pointer_candidate(&mut self,
obligation.cause.clone()
}
}
+
+ /// Upcasts an object trait-reference into those that match the obligation.
+ fn upcast(&mut self, obj_trait_ref: ty::PolyTraitRef<'tcx>, obligation: &TraitObligation<'tcx>)
+ -> Vec<ty::PolyTraitRef<'tcx>>
+ {
+ debug!("upcast(obj_trait_ref={}, obligation={})",
+ obj_trait_ref.repr(self.tcx()),
+ obligation.repr(self.tcx()));
+
+ let obligation_def_id = obligation.predicate.def_id();
+ let mut upcast_trait_refs = util::upcast(self.tcx(), obj_trait_ref, obligation_def_id);
+
+ // Retain only those upcast versions that match the trait-ref
+ // we are looking for. In particular, we know that all of
+ // `upcast_trait_refs` apply to the correct trait, but
+ // possibly with incorrect type parameters. For example, we
+ // may be trying to upcast `Foo` to `Bar<i32>`, but `Foo` is
+ // declared as `trait Foo : Bar<u32>`.
+ upcast_trait_refs.retain(|upcast_trait_ref| {
+ let upcast_trait_ref = upcast_trait_ref.clone();
+ self.infcx.probe(|_| self.match_poly_trait_ref(obligation, upcast_trait_ref)).is_ok()
+ });
+
+ debug!("upcast: upcast_trait_refs={}", upcast_trait_refs.repr(self.tcx()));
+ upcast_trait_refs
+ }
}
impl<'tcx> Repr<'tcx> for SelectionCandidate<'tcx> {
/// 'static`.
pub struct Elaborator<'cx, 'tcx:'cx> {
tcx: &'cx ty::ctxt<'tcx>,
- stack: Vec<StackEntry<'tcx>>,
+ stack: Vec<ty::Predicate<'tcx>>,
visited: PredicateSet<'cx,'tcx>,
}
-struct StackEntry<'tcx> {
- position: uint,
- predicates: Vec<ty::Predicate<'tcx>>,
-}
-
pub fn elaborate_trait_ref<'cx, 'tcx>(
tcx: &'cx ty::ctxt<'tcx>,
trait_ref: ty::PolyTraitRef<'tcx>)
{
let mut visited = PredicateSet::new(tcx);
predicates.retain(|pred| visited.insert(pred));
- let entry = StackEntry { position: 0, predicates: predicates };
- Elaborator { tcx: tcx, stack: vec![entry], visited: visited }
+ Elaborator { tcx: tcx, stack: predicates, visited: visited }
}
impl<'cx, 'tcx> Elaborator<'cx, 'tcx> {
- pub fn filter_to_traits(self) -> Supertraits<'cx, 'tcx> {
- Supertraits { elaborator: self }
+ pub fn filter_to_traits(self) -> FilterToTraits<Elaborator<'cx, 'tcx>> {
+ FilterToTraits::new(self)
}
fn push(&mut self, predicate: &ty::Predicate<'tcx>) {
match *predicate {
ty::Predicate::Trait(ref data) => {
- let mut predicates =
- ty::predicates_for_trait_ref(self.tcx,
- &data.to_poly_trait_ref());
+ // Predicates declared on the trait.
+ let predicates = ty::lookup_super_predicates(self.tcx, data.def_id());
+
+ let mut predicates: Vec<_> =
+ predicates.predicates
+ .iter()
+ .map(|p| p.subst_supertrait(self.tcx, &data.to_poly_trait_ref()))
+ .collect();
+
+ debug!("super_predicates: data={} predicates={}",
+ data.repr(self.tcx), predicates.repr(self.tcx));
// Only keep those bounds that we haven't already
// seen. This is necessary to prevent infinite
// Sized { }`.
predicates.retain(|r| self.visited.insert(r));
- self.stack.push(StackEntry { position: 0,
- predicates: predicates });
+ self.stack.extend(predicates.into_iter());
}
ty::Predicate::Equate(..) => {
// Currently, we do not "elaborate" predicates like
type Item = ty::Predicate<'tcx>;
fn next(&mut self) -> Option<ty::Predicate<'tcx>> {
- loop {
- // Extract next item from top-most stack frame, if any.
- let next_predicate = match self.stack.last_mut() {
- None => {
- // No more stack frames. Done.
- return None;
- }
- Some(entry) => {
- let p = entry.position;
- if p < entry.predicates.len() {
- // Still more predicates left in the top stack frame.
- entry.position += 1;
-
- let next_predicate =
- entry.predicates[p].clone();
-
- Some(next_predicate)
- } else {
- None
- }
- }
- };
-
- match next_predicate {
- Some(next_predicate) => {
- self.push(&next_predicate);
- return Some(next_predicate);
- }
-
- None => {
- // Top stack frame is exhausted, pop it.
- self.stack.pop();
- }
+ // Extract next item from top-most stack frame, if any.
+ let next_predicate = match self.stack.pop() {
+ Some(predicate) => predicate,
+ None => {
+ // No more stack frames. Done.
+ return None;
}
- }
+ };
+ self.push(&next_predicate);
+ return Some(next_predicate);
}
}
// Supertrait iterator
///////////////////////////////////////////////////////////////////////////
-/// A filter around the `Elaborator` that just yields up supertrait references,
-/// not other kinds of predicates.
-pub struct Supertraits<'cx, 'tcx:'cx> {
- elaborator: Elaborator<'cx, 'tcx>,
-}
+pub type Supertraits<'cx, 'tcx> = FilterToTraits<Elaborator<'cx, 'tcx>>;
pub fn supertraits<'cx, 'tcx>(tcx: &'cx ty::ctxt<'tcx>,
trait_ref: ty::PolyTraitRef<'tcx>)
elaborate_trait_refs(tcx, bounds).filter_to_traits()
}
-impl<'cx, 'tcx> Iterator for Supertraits<'cx, 'tcx> {
+///////////////////////////////////////////////////////////////////////////
+// Other
+///////////////////////////////////////////////////////////////////////////
+
+/// A filter around an iterator of predicates that makes it yield up
+/// just trait references.
+pub struct FilterToTraits<I> {
+ base_iterator: I
+}
+
+impl<I> FilterToTraits<I> {
+ fn new(base: I) -> FilterToTraits<I> {
+ FilterToTraits { base_iterator: base }
+ }
+}
+
+impl<'tcx,I:Iterator<Item=ty::Predicate<'tcx>>> Iterator for FilterToTraits<I> {
type Item = ty::PolyTraitRef<'tcx>;
fn next(&mut self) -> Option<ty::PolyTraitRef<'tcx>> {
loop {
- match self.elaborator.next() {
+ match self.base_iterator.next() {
None => {
return None;
}
}
}
+
///////////////////////////////////////////////////////////////////////////
// Other
///////////////////////////////////////////////////////////////////////////
pub fn upcast<'tcx>(tcx: &ty::ctxt<'tcx>,
source_trait_ref: ty::PolyTraitRef<'tcx>,
target_trait_def_id: ast::DefId)
- -> Option<ty::PolyTraitRef<'tcx>>
+ -> Vec<ty::PolyTraitRef<'tcx>>
{
if source_trait_ref.def_id() == target_trait_def_id {
- return Some(source_trait_ref); // shorcut the most common case
- }
-
- for super_trait_ref in supertraits(tcx, source_trait_ref) {
- if super_trait_ref.def_id() == target_trait_def_id {
- return Some(super_trait_ref);
- }
+        return vec![source_trait_ref]; // shortcut the most common case
}
- None
+ supertraits(tcx, source_trait_ref)
+ .filter(|r| r.def_id() == target_trait_def_id)
+ .collect()
}
/// Given an object of type `object_trait_ref`, returns the index of
pub use self::InferRegion::*;
pub use self::ImplOrTraitItemId::*;
pub use self::ClosureKind::*;
-pub use self::ast_ty_to_ty_cache_entry::*;
pub use self::Variance::*;
pub use self::AutoAdjustment::*;
pub use self::Representability::*;
pub len: uint
}
-#[derive(Copy)]
-pub enum ast_ty_to_ty_cache_entry<'tcx> {
- atttce_unresolved, /* not resolved yet */
- atttce_resolved(Ty<'tcx>) /* resolved to a type, irrespective of region */
-}
-
#[derive(Clone, PartialEq, RustcDecodable, RustcEncodable)]
pub struct ItemVariances {
pub types: VecPerParamSpace<Variance>,
/// associated predicates.
pub predicates: RefCell<DefIdMap<GenericPredicates<'tcx>>>,
+ /// Maps from the def-id of a trait to the list of
+ /// super-predicates. This is a subset of the full list of
+ /// predicates. We store these in a separate map because we must
+ /// evaluate them even during type conversion, often before the
+ /// full predicates are available (note that supertraits have
+ /// additional acyclicity requirements).
+ pub super_predicates: RefCell<DefIdMap<GenericPredicates<'tcx>>>,
+
/// Maps from node-id of a trait object cast (like `foo as
/// Box<Trait>`) to the trait reference.
pub object_cast_map: ObjectCastMap<'tcx>,
pub rcache: RefCell<FnvHashMap<creader_cache_key, Ty<'tcx>>>,
pub short_names_cache: RefCell<FnvHashMap<Ty<'tcx>, String>>,
pub tc_cache: RefCell<FnvHashMap<Ty<'tcx>, TypeContents>>,
- pub ast_ty_to_ty_cache: RefCell<NodeMap<ast_ty_to_ty_cache_entry<'tcx>>>,
+ pub ast_ty_to_ty_cache: RefCell<NodeMap<Ty<'tcx>>>,
pub enum_var_cache: RefCell<DefIdMap<Rc<Vec<Rc<VariantInfo<'tcx>>>>>>,
pub ty_param_defs: RefCell<NodeMap<TypeParameterDef<'tcx>>>,
pub adjustments: RefCell<NodeMap<AutoAdjustment<'tcx>>>,
/// definition and not a concrete use of it. To get the correct `ty_enum`
/// from the tcx, use the `NodeId` from the `ast::Ty` and look it up in
/// the `ast_ty_to_ty_cache`. This is probably true for `ty_struct` as
- /// well.`
+ /// well.
ty_enum(DefId, &'tcx Substs<'tcx>),
ty_uniq(Ty<'tcx>),
ty_str,
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub struct Binder<T>(pub T);
+impl<T> Binder<T> {
+ /// Skips the binder and returns the "bound" value. This is a
+ /// risky thing to do because it's easy to get confused about
+    /// De Bruijn indices and the like. It is usually better to
+ /// discharge the binder using `no_late_bound_regions` or
+ /// `replace_late_bound_regions` or something like
+ /// that. `skip_binder` is only valid when you are either
+ /// extracting data that has nothing to do with bound regions, you
+ /// are doing some sort of test that does not involve bound
+ /// regions, or you are being very careful about your depth
+ /// accounting.
+ ///
+ /// Some examples where `skip_binder` is reasonable:
+ /// - extracting the def-id from a PolyTraitRef;
+ /// - comparing the self type of a PolyTraitRef to see if it is equal to
+ /// a type parameter `X`, since the type `X` does not reference any regions
+ pub fn skip_binder(&self) -> &T {
+ &self.0
+ }
+}
+
#[derive(Clone, Copy, PartialEq)]
pub enum IntVarValue {
IntType(ast::IntTy),
predicates: self.predicates.subst(tcx, substs),
}
}
+
+    /// Instantiates these predicates for a supertrait of
+    /// `poly_trait_ref`: every predicate is mapped through
+    /// `Predicate::subst_supertrait`, the binder-aware substitution
+    /// used when deriving supertrait obligations from a (possibly
+    /// higher-ranked) trait reference. See the lengthy HRTB comment
+    /// on `subst_supertrait` for why a plain `subst` is not enough.
+    pub fn instantiate_supertrait(&self,
+                                  tcx: &ty::ctxt<'tcx>,
+                                  poly_trait_ref: &ty::PolyTraitRef<'tcx>)
+                                  -> InstantiatedPredicates<'tcx>
+    {
+        InstantiatedPredicates {
+            predicates: self.predicates.map(|pred| pred.subst_supertrait(tcx, poly_trait_ref))
+        }
+    }
}
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
Projection(PolyProjectionPredicate<'tcx>),
}
+impl<'tcx> Predicate<'tcx> {
+    /// Performs a substitution suitable for going from a
+ /// poly-trait-ref to supertraits that must hold if that
+ /// poly-trait-ref holds. This is slightly different from a normal
+ /// substitution in terms of what happens with bound regions. See
+ /// lengthy comment below for details.
+ pub fn subst_supertrait(&self,
+ tcx: &ty::ctxt<'tcx>,
+ trait_ref: &ty::PolyTraitRef<'tcx>)
+ -> ty::Predicate<'tcx>
+ {
+ // The interaction between HRTB and supertraits is not entirely
+ // obvious. Let me walk you (and myself) through an example.
+ //
+ // Let's start with an easy case. Consider two traits:
+ //
+ // trait Foo<'a> : Bar<'a,'a> { }
+ // trait Bar<'b,'c> { }
+ //
+ // Now, if we have a trait reference `for<'x> T : Foo<'x>`, then
+ // we can deduce that `for<'x> T : Bar<'x,'x>`. Basically, if we
+ // knew that `Foo<'x>` (for any 'x) then we also know that
+ // `Bar<'x,'x>` (for any 'x). This more-or-less falls out from
+ // normal substitution.
+ //
+ // In terms of why this is sound, the idea is that whenever there
+ // is an impl of `T:Foo<'a>`, it must show that `T:Bar<'a,'a>`
+ // holds. So if there is an impl of `T:Foo<'a>` that applies to
+ // all `'a`, then we must know that `T:Bar<'a,'a>` holds for all
+ // `'a`.
+ //
+ // Another example to be careful of is this:
+ //
+ // trait Foo1<'a> : for<'b> Bar1<'a,'b> { }
+ // trait Bar1<'b,'c> { }
+ //
+ // Here, if we have `for<'x> T : Foo1<'x>`, then what do we know?
+ // The answer is that we know `for<'x,'b> T : Bar1<'x,'b>`. The
+ // reason is similar to the previous example: any impl of
+ // `T:Foo1<'x>` must show that `for<'b> T : Bar1<'x, 'b>`. So
+ // basically we would want to collapse the bound lifetimes from
+ // the input (`trait_ref`) and the supertraits.
+ //
+ // To achieve this in practice is fairly straightforward. Let's
+ // consider the more complicated scenario:
+ //
+ // - We start out with `for<'x> T : Foo1<'x>`. In this case, `'x`
+ // has a De Bruijn index of 1. We want to produce `for<'x,'b> T : Bar1<'x,'b>`,
+ // where both `'x` and `'b` would have a DB index of 1.
+ // The substitution from the input trait-ref is therefore going to be
+ // `'a => 'x` (where `'x` has a DB index of 1).
+ // - The super-trait-ref is `for<'b> Bar1<'a,'b>`, where `'a` is an
+ // early-bound parameter and `'b' is a late-bound parameter with a
+ // DB index of 1.
+ // - If we replace `'a` with `'x` from the input, it too will have
+ // a DB index of 1, and thus we'll have `for<'x,'b> Bar1<'x,'b>`
+ // just as we wanted.
+ //
+ // There is only one catch. If we just apply the substitution `'a
+ // => 'x` to `for<'b> Bar1<'a,'b>`, the substitution code will
+        // adjust the DB index because we are substituting into a binder (it
+ // tries to be so smart...) resulting in `for<'x> for<'b>
+ // Bar1<'x,'b>` (we have no syntax for this, so use your
+ // imagination). Basically the 'x will have DB index of 2 and 'b
+ // will have DB index of 1. Not quite what we want. So we apply
+ // the substitution to the *contents* of the trait reference,
+ // rather than the trait reference itself (put another way, the
+ // substitution code expects equal binding levels in the values
+ // from the substitution and the value being substituted into, and
+ // this trick achieves that).
+
+ let substs = &trait_ref.0.substs;
+ match *self {
+ Predicate::Trait(ty::Binder(ref data)) =>
+ Predicate::Trait(ty::Binder(data.subst(tcx, substs))),
+ Predicate::Equate(ty::Binder(ref data)) =>
+ Predicate::Equate(ty::Binder(data.subst(tcx, substs))),
+ Predicate::RegionOutlives(ty::Binder(ref data)) =>
+ Predicate::RegionOutlives(ty::Binder(data.subst(tcx, substs))),
+ Predicate::TypeOutlives(ty::Binder(ref data)) =>
+ Predicate::TypeOutlives(ty::Binder(data.subst(tcx, substs))),
+ Predicate::Projection(ty::Binder(ref data)) =>
+ Predicate::Projection(ty::Binder(data.subst(tcx, substs))),
+ }
+ }
+}
+
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub struct TraitPredicate<'tcx> {
pub trait_ref: Rc<TraitRef<'tcx>>
/// implements the trait.
pub generics: Generics<'tcx>,
- /// The "supertrait" bounds.
- pub bounds: ParamBounds<'tcx>,
-
pub trait_ref: Rc<ty::TraitRef<'tcx>>,
/// A list of the associated types defined in this trait. Useful
impl_trait_refs: RefCell::new(NodeMap()),
trait_defs: RefCell::new(DefIdMap()),
predicates: RefCell::new(DefIdMap()),
+ super_predicates: RefCell::new(DefIdMap()),
object_cast_map: RefCell::new(NodeMap()),
map: map,
intrinsic_defs: RefCell::new(DefIdMap()),
})
}
-/// Given the did of a trait, returns its full set of predicates.
+/// Given the did of an item, returns its full set of predicates.
pub fn lookup_predicates<'tcx>(cx: &ctxt<'tcx>, did: ast::DefId)
-> GenericPredicates<'tcx>
{
})
}
-/// Given a reference to a trait, returns the "superbounds" declared
-/// on the trait, with appropriate substitutions applied. Basically,
-/// this applies a filter to the where clauses on the trait, returning
-/// those that have the form:
-///
-/// Self : SuperTrait<...>
-/// Self : 'region
-pub fn predicates_for_trait_ref<'tcx>(tcx: &ctxt<'tcx>,
- trait_ref: &PolyTraitRef<'tcx>)
- -> Vec<ty::Predicate<'tcx>>
+/// Given the did of a trait, returns its superpredicates.
+pub fn lookup_super_predicates<'tcx>(cx: &ctxt<'tcx>, did: ast::DefId)
+ -> GenericPredicates<'tcx>
{
- let trait_def = lookup_trait_def(tcx, trait_ref.def_id());
-
- debug!("bounds_for_trait_ref(trait_def={:?}, trait_ref={:?})",
- trait_def.repr(tcx), trait_ref.repr(tcx));
-
- // The interaction between HRTB and supertraits is not entirely
- // obvious. Let me walk you (and myself) through an example.
- //
- // Let's start with an easy case. Consider two traits:
- //
- // trait Foo<'a> : Bar<'a,'a> { }
- // trait Bar<'b,'c> { }
- //
- // Now, if we have a trait reference `for<'x> T : Foo<'x>`, then
- // we can deduce that `for<'x> T : Bar<'x,'x>`. Basically, if we
- // knew that `Foo<'x>` (for any 'x) then we also know that
- // `Bar<'x,'x>` (for any 'x). This more-or-less falls out from
- // normal substitution.
- //
- // In terms of why this is sound, the idea is that whenever there
- // is an impl of `T:Foo<'a>`, it must show that `T:Bar<'a,'a>`
- // holds. So if there is an impl of `T:Foo<'a>` that applies to
- // all `'a`, then we must know that `T:Bar<'a,'a>` holds for all
- // `'a`.
- //
- // Another example to be careful of is this:
- //
- // trait Foo1<'a> : for<'b> Bar1<'a,'b> { }
- // trait Bar1<'b,'c> { }
- //
- // Here, if we have `for<'x> T : Foo1<'x>`, then what do we know?
- // The answer is that we know `for<'x,'b> T : Bar1<'x,'b>`. The
- // reason is similar to the previous example: any impl of
- // `T:Foo1<'x>` must show that `for<'b> T : Bar1<'x, 'b>`. So
- // basically we would want to collapse the bound lifetimes from
- // the input (`trait_ref`) and the supertraits.
- //
- // To achieve this in practice is fairly straightforward. Let's
- // consider the more complicated scenario:
- //
- // - We start out with `for<'x> T : Foo1<'x>`. In this case, `'x`
- // has a De Bruijn index of 1. We want to produce `for<'x,'b> T : Bar1<'x,'b>`,
- // where both `'x` and `'b` would have a DB index of 1.
- // The substitution from the input trait-ref is therefore going to be
- // `'a => 'x` (where `'x` has a DB index of 1).
- // - The super-trait-ref is `for<'b> Bar1<'a,'b>`, where `'a` is an
- // early-bound parameter and `'b' is a late-bound parameter with a
- // DB index of 1.
- // - If we replace `'a` with `'x` from the input, it too will have
- // a DB index of 1, and thus we'll have `for<'x,'b> Bar1<'x,'b>`
- // just as we wanted.
- //
- // There is only one catch. If we just apply the substitution `'a
- // => 'x` to `for<'b> Bar1<'a,'b>`, the substitution code will
- // adjust the DB index because we substituting into a binder (it
- // tries to be so smart...) resulting in `for<'x> for<'b>
- // Bar1<'x,'b>` (we have no syntax for this, so use your
- // imagination). Basically the 'x will have DB index of 2 and 'b
- // will have DB index of 1. Not quite what we want. So we apply
- // the substitution to the *contents* of the trait reference,
- // rather than the trait reference itself (put another way, the
- // substitution code expects equal binding levels in the values
- // from the substitution and the value being substituted into, and
- // this trick achieves that).
-
- // Carefully avoid the binder introduced by each trait-ref by
- // substituting over the substs, not the trait-refs themselves,
- // thus achieving the "collapse" described in the big comment
- // above.
- let trait_bounds: Vec<_> =
- trait_def.bounds.trait_bounds
- .iter()
- .map(|poly_trait_ref| ty::Binder(poly_trait_ref.0.subst(tcx, trait_ref.substs())))
- .collect();
-
- let projection_bounds: Vec<_> =
- trait_def.bounds.projection_bounds
- .iter()
- .map(|poly_proj| ty::Binder(poly_proj.0.subst(tcx, trait_ref.substs())))
- .collect();
-
- debug!("bounds_for_trait_ref: trait_bounds={} projection_bounds={}",
- trait_bounds.repr(tcx),
- projection_bounds.repr(tcx));
-
- // The region bounds and builtin bounds do not currently introduce
- // binders so we can just substitute in a straightforward way here.
- let region_bounds =
- trait_def.bounds.region_bounds.subst(tcx, trait_ref.substs());
- let builtin_bounds =
- trait_def.bounds.builtin_bounds.subst(tcx, trait_ref.substs());
-
- let bounds = ty::ParamBounds {
- trait_bounds: trait_bounds,
- region_bounds: region_bounds,
- builtin_bounds: builtin_bounds,
- projection_bounds: projection_bounds,
- };
-
- predicates(tcx, trait_ref.self_ty(), &bounds)
+ memoized(&cx.super_predicates, did, |did: DefId| {
+ assert!(did.krate != ast::LOCAL_CRATE);
+ csearch::get_super_predicates(cx, did)
+ })
}
pub fn predicates<'tcx>(
impl<'tcx> TypeFoldable<'tcx> for traits::VtableObjectData<'tcx> {
fn fold_with<F:TypeFolder<'tcx>>(&self, folder: &mut F) -> traits::VtableObjectData<'tcx> {
traits::VtableObjectData {
- object_ty: self.object_ty.fold_with(folder)
+ object_ty: self.object_ty.fold_with(folder),
+ upcast_trait_ref: self.upcast_trait_ref.fold_with(folder),
}
}
}
use metadata::creader::CrateReader;
use plugin::registry::Registry;
-use std::mem;
-use std::os;
-use std::dynamic_lib::DynamicLibrary;
use std::borrow::ToOwned;
+use std::dynamic_lib::DynamicLibrary;
+use std::env;
+use std::mem;
+use std::old_path;
+use std::path::PathBuf;
use syntax::ast;
use syntax::codemap::{Span, COMMAND_LINE_SP};
use syntax::ptr::P;
// Dynamically link a registrar function into the compiler process.
fn dylink_registrar(&mut self,
span: Span,
- path: Path,
+ path: PathBuf,
symbol: String) -> PluginRegistrarFun {
// Make sure the path contains a / or the linker will search for it.
- let path = os::getcwd().unwrap().join(&path);
+ let path = env::current_dir().unwrap().join(&path);
+ let path = old_path::Path::new(path.to_str().unwrap());
let lib = match DynamicLibrary::open(Some(&path)) {
Ok(lib) => lib,
/// This is the most general hook into `libsyntax`'s expansion behavior.
pub fn register_syntax_extension(&mut self, name: ast::Name, extension: SyntaxExtension) {
self.syntax_exts.push((name, match extension {
- NormalTT(ext, _) => NormalTT(ext, Some(self.krate_span)),
- IdentTT(ext, _) => IdentTT(ext, Some(self.krate_span)),
+ NormalTT(ext, _, allow_internal_unstable) => {
+ NormalTT(ext, Some(self.krate_span), allow_internal_unstable)
+ }
+ IdentTT(ext, _, allow_internal_unstable) => {
+ IdentTT(ext, Some(self.krate_span), allow_internal_unstable)
+ }
Decorator(ext) => Decorator(ext),
Modifier(ext) => Modifier(ext),
MultiModifier(ext) => MultiModifier(ext),
/// It builds for you a `NormalTT` that calls `expander`,
/// and also takes care of interning the macro's name.
pub fn register_macro(&mut self, name: &str, expander: MacroExpanderFn) {
- self.register_syntax_extension(token::intern(name), NormalTT(Box::new(expander), None));
+ self.register_syntax_extension(token::intern(name),
+ NormalTT(Box::new(expander), None, false));
}
/// Register a compiler lint pass.
use std::collections::hash_map::Entry::{Occupied, Vacant};
use std::env;
use std::fmt;
+use std::path::PathBuf;
use llvm;
pub gc: bool,
pub optimize: OptLevel,
+ pub debug_assertions: bool,
pub debuginfo: DebugInfoLevel,
pub lint_opts: Vec<(String, lint::Level)>,
pub describe_lints: bool,
// this.
pub search_paths: SearchPaths,
pub libs: Vec<(String, cstore::NativeLibraryKind)>,
- pub maybe_sysroot: Option<Path>,
+ pub maybe_sysroot: Option<PathBuf>,
pub target_triple: String,
// User-specified cfg meta items. The compiler itself will add additional
// items to the crate config, and during parsing the entire crate config
pub no_analysis: bool,
pub debugging_opts: DebuggingOptions,
/// Whether to write dependency files. It's (enabled, optional filename).
- pub write_dependency_info: (bool, Option<Path>),
+ pub write_dependency_info: (bool, Option<PathBuf>),
pub prints: Vec<PrintRequest>,
pub cg: CodegenOptions,
pub color: ColorConfig,
pub enum Input {
/// Load source from file
- File(Path),
+ File(PathBuf),
/// The string is the source
Str(String)
}
impl Input {
pub fn filestem(&self) -> String {
match *self {
- Input::File(ref ifile) => ifile.filestem_str().unwrap().to_string(),
+ Input::File(ref ifile) => ifile.file_stem().unwrap()
+ .to_str().unwrap().to_string(),
Input::Str(_) => "rust_out".to_string(),
}
}
#[derive(Clone)]
pub struct OutputFilenames {
- pub out_directory: Path,
+ pub out_directory: PathBuf,
pub out_filestem: String,
- pub single_output_file: Option<Path>,
+ pub single_output_file: Option<PathBuf>,
pub extra: String,
}
impl OutputFilenames {
- pub fn path(&self, flavor: OutputType) -> Path {
+ pub fn path(&self, flavor: OutputType) -> PathBuf {
match self.single_output_file {
Some(ref path) => return path.clone(),
None => {}
self.temp_path(flavor)
}
- pub fn temp_path(&self, flavor: OutputType) -> Path {
- let base = self.out_directory.join(self.filestem());
+ pub fn temp_path(&self, flavor: OutputType) -> PathBuf {
+ let base = self.out_directory.join(&self.filestem());
match flavor {
OutputTypeBitcode => base.with_extension("bc"),
OutputTypeAssembly => base.with_extension("s"),
}
}
- pub fn with_extension(&self, extension: &str) -> Path {
- self.out_directory.join(self.filestem()).with_extension(extension)
+ pub fn with_extension(&self, extension: &str) -> PathBuf {
+ self.out_directory.join(&self.filestem()).with_extension(extension)
}
pub fn filestem(&self) -> String {
crate_name: None,
alt_std_name: None,
libs: Vec::new(),
- unstable_features: UnstableFeatures::Disallow
+ unstable_features: UnstableFeatures::Disallow,
+ debug_assertions: true,
}
}
2 = full debug info with variable and type information"),
opt_level: Option<uint> = (None, parse_opt_uint,
"Optimize with possible levels 0-3"),
+ debug_assertions: Option<bool> = (None, parse_opt_bool,
+ "explicitly enable the cfg(debug_assertions) directive"),
}
};
let mk = attr::mk_name_value_item_str;
- return vec!(// Target bindings.
+ let mut ret = vec![ // Target bindings.
attr::mk_word_item(fam.clone()),
mk(InternedString::new("target_os"), intern(os)),
mk(InternedString::new("target_family"), fam),
mk(InternedString::new("target_endian"), intern(end)),
mk(InternedString::new("target_pointer_width"),
intern(wordsz))
- );
+ ];
+ if sess.opts.debug_assertions {
+ ret.push(attr::mk_word_item(InternedString::new("debug_assertions")));
+ }
+ return ret;
}
pub fn append_configuration(cfg: &mut ast::CrateConfig,
let cg = build_codegen_options(matches);
- let sysroot_opt = matches.opt_str("sysroot").map(|m| Path::new(m));
+ let sysroot_opt = matches.opt_str("sysroot").map(|m| PathBuf::new(&m));
let target = matches.opt_str("target").unwrap_or(
host_triple().to_string());
let opt_level = {
}
}
};
+ let debug_assertions = cg.debug_assertions.unwrap_or(opt_level == No);
let gc = debugging_opts.gc;
let debuginfo = if matches.opt_present("g") {
if cg.debuginfo.is_some() {
alt_std_name: None,
libs: libs,
unstable_features: get_unstable_features_setting(),
+ debug_assertions: debug_assertions,
}
}
use rustc_back::target::Target;
+use std::path::{Path, PathBuf};
use std::cell::{Cell, RefCell};
-use std::os;
+use std::env;
pub mod config;
pub mod search_paths;
pub entry_fn: RefCell<Option<(NodeId, codemap::Span)>>,
pub entry_type: Cell<Option<config::EntryFnType>>,
pub plugin_registrar_fn: Cell<Option<ast::NodeId>>,
- pub default_sysroot: Option<Path>,
+ pub default_sysroot: Option<PathBuf>,
// The name of the root source file of the crate, in the local file system. The path is always
// expected to be absolute. `None` means that there is no source file.
- pub local_crate_source_file: Option<Path>,
- pub working_dir: Path,
+ pub local_crate_source_file: Option<PathBuf>,
+ pub working_dir: PathBuf,
pub lint_store: RefCell<lint::LintStore>,
pub lints: RefCell<NodeMap<Vec<(lint::LintId, codemap::Span, String)>>>,
pub crate_types: RefCell<Vec<config::CrateType>>,
impl Session {
pub fn span_fatal(&self, sp: Span, msg: &str) -> ! {
+ if self.opts.treat_err_as_bug {
+ self.span_bug(sp, msg);
+ }
self.diagnostic().span_fatal(sp, msg)
}
pub fn span_fatal_with_code(&self, sp: Span, msg: &str, code: &str) -> ! {
+ if self.opts.treat_err_as_bug {
+ self.span_bug(sp, msg);
+ }
self.diagnostic().span_fatal_with_code(sp, msg, code)
}
pub fn fatal(&self, msg: &str) -> ! {
+ if self.opts.treat_err_as_bug {
+ self.bug(msg);
+ }
self.diagnostic().handler().fatal(msg)
}
pub fn span_err(&self, sp: Span, msg: &str) {
}
pub fn build_session(sopts: config::Options,
- local_crate_source_file: Option<Path>,
+ local_crate_source_file: Option<PathBuf>,
registry: diagnostics::registry::Registry)
-> Session {
// FIXME: This is not general enough to make the warning lint completely override
}
pub fn build_session_(sopts: config::Options,
- local_crate_source_file: Option<Path>,
+ local_crate_source_file: Option<PathBuf>,
span_diagnostic: diagnostic::SpanHandler)
-> Session {
let host = match Target::search(config::host_triple()) {
if path.is_absolute() {
path.clone()
} else {
- os::getcwd().unwrap().join(&path)
+ env::current_dir().unwrap().join(&path)
}
);
plugin_registrar_fn: Cell::new(None),
default_sysroot: default_sysroot,
local_crate_source_file: local_crate_source_file,
- working_dir: os::getcwd().unwrap(),
+ working_dir: env::current_dir().unwrap(),
lint_store: RefCell::new(lint::LintStore::new()),
lints: RefCell::new(NodeMap()),
crate_types: RefCell::new(Vec::new()),
// except according to those terms.
use std::slice;
+use std::path::{Path, PathBuf};
#[derive(Clone, Debug)]
pub struct SearchPaths {
- paths: Vec<(PathKind, Path)>,
+ paths: Vec<(PathKind, PathBuf)>,
}
pub struct Iter<'a> {
kind: PathKind,
- iter: slice::Iter<'a, (PathKind, Path)>,
+ iter: slice::Iter<'a, (PathKind, PathBuf)>,
}
#[derive(Eq, PartialEq, Clone, Copy, Debug)]
} else {
(PathKind::All, path)
};
- self.paths.push((kind, Path::new(path)));
+ self.paths.push((kind, PathBuf::new(path)));
}
pub fn iter(&self, kind: PathKind) -> Iter {
use std::cell::{RefCell, Cell};
use std::collections::HashMap;
+use std::collections::hash_state::HashState;
+use std::ffi::CString;
use std::fmt::Debug;
use std::hash::Hash;
use std::iter::repeat;
+use std::path::Path;
use std::time::Duration;
-use std::collections::hash_state::HashState;
use syntax::ast;
use syntax::visit;
}
}
}
+
+/// Converts a `Path` into a NUL-terminated `CString` for handing to
+/// C APIs (e.g. LLVM).
+///
+/// On unix the raw `OsStr` bytes are passed through unchanged.
+/// Panics (`unwrap`) if the path contains an interior NUL byte.
+#[cfg(unix)]
+pub fn path2cstr(p: &Path) -> CString {
+    use std::os::unix::prelude::*;
+    use std::ffi::AsOsStr;
+    CString::new(p.as_os_str().as_bytes()).unwrap()
+}
+/// Windows variant of `path2cstr`: additionally requires the path to
+/// be valid UTF-8 (`to_str().unwrap()` panics otherwise), and panics
+/// on interior NUL bytes.
+#[cfg(windows)]
+pub fn path2cstr(p: &Path) -> CString {
+    CString::new(p.to_str().unwrap()).unwrap()
+}
fn test_lev_distance() {
use std::char::{ from_u32, MAX };
// Test bytelength agnosticity
- for c in (0u32..MAX as u32)
+ for c in (0..MAX as u32)
.filter_map(|i| from_u32(i))
.map(|i| i.to_string()) {
assert_eq!(lev_distance(&c[..], &c[..]), 0);
impl<'tcx> Repr<'tcx> for ty::TraitDef<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
- format!("TraitDef(generics={}, bounds={}, trait_ref={})",
+ format!("TraitDef(generics={}, trait_ref={})",
self.generics.repr(tcx),
- self.bounds.repr(tcx),
self.trait_ref.repr(tcx))
}
}
//! A helper class for dealing with static archives
-use std::old_io::fs::PathExtensions;
-use std::old_io::process::{Command, ProcessOutput};
-use std::old_io::{fs, TempDir};
-use std::old_io;
-use std::os;
+use std::env;
+use std::fs::{self, TempDir};
+use std::io::prelude::*;
+use std::io;
+use std::path::{Path, PathBuf};
+use std::process::{Command, Output, Stdio};
use std::str;
use syntax::diagnostic::Handler as ErrorHandler;
pub struct ArchiveConfig<'a> {
pub handler: &'a ErrorHandler,
- pub dst: Path,
- pub lib_search_paths: Vec<Path>,
+ pub dst: PathBuf,
+ pub lib_search_paths: Vec<PathBuf>,
pub slib_prefix: String,
pub slib_suffix: String,
pub maybe_ar_prog: Option<String>
pub struct Archive<'a> {
handler: &'a ErrorHandler,
- dst: Path,
- lib_search_paths: Vec<Path>,
+ dst: PathBuf,
+ lib_search_paths: Vec<PathBuf>,
slib_prefix: String,
slib_suffix: String,
maybe_ar_prog: Option<String>
archive: Archive<'a>,
work_dir: TempDir,
/// Filename of each member that should be added to the archive.
- members: Vec<Path>,
+ members: Vec<PathBuf>,
should_update_symbols: bool,
}
fn run_ar(handler: &ErrorHandler, maybe_ar_prog: &Option<String>,
args: &str, cwd: Option<&Path>,
- paths: &[&Path]) -> ProcessOutput {
+ paths: &[&Path]) -> Output {
let ar = match *maybe_ar_prog {
Some(ref ar) => &ar[..],
None => "ar"
};
let mut cmd = Command::new(ar);
- cmd.arg(args).args(paths);
+ cmd.arg(args).args(paths).stdout(Stdio::piped()).stderr(Stdio::piped());
debug!("{:?}", cmd);
match cwd {
Some(p) => {
- cmd.cwd(p);
+ cmd.current_dir(p);
debug!("inside {:?}", p.display());
}
None => {}
if !o.status.success() {
handler.err(&format!("{:?} failed with: {}", cmd, o.status));
handler.note(&format!("stdout ---\n{}",
- str::from_utf8(&o.output).unwrap()));
+ str::from_utf8(&o.stdout).unwrap()));
handler.note(&format!("stderr ---\n{}",
- str::from_utf8(&o.error).unwrap())
+ str::from_utf8(&o.stderr).unwrap())
);
handler.abort_if_errors();
}
}
pub fn find_library(name: &str, osprefix: &str, ossuffix: &str,
- search_paths: &[Path], handler: &ErrorHandler) -> Path {
+ search_paths: &[PathBuf],
+ handler: &ErrorHandler) -> PathBuf {
// On Windows, static libraries sometimes show up as libfoo.a and other
// times show up as foo.lib
let oslibname = format!("{}{}{}", osprefix, name, ossuffix);
let unixlibname = format!("lib{}.a", name);
for path in search_paths {
- debug!("looking for {} inside {:?}", name, path.display());
+ debug!("looking for {} inside {:?}", name, path);
let test = path.join(&oslibname[..]);
if test.exists() { return test }
if oslibname != unixlibname {
/// Lists all files in an archive
pub fn files(&self) -> Vec<String> {
let output = run_ar(self.handler, &self.maybe_ar_prog, "t", None, &[&self.dst]);
- let output = str::from_utf8(&output.output).unwrap();
+ let output = str::from_utf8(&output.stdout).unwrap();
// use lines_any because windows delimits output with `\r\n` instead of
// just `\n`
output.lines_any().map(|s| s.to_string()).collect()
/// Adds all of the contents of a native library to this archive. This will
/// search in the relevant locations for a library named `name`.
- pub fn add_native_library(&mut self, name: &str) -> old_io::IoResult<()> {
+ pub fn add_native_library(&mut self, name: &str) -> io::Result<()> {
let location = find_library(name,
&self.archive.slib_prefix,
&self.archive.slib_suffix,
/// This ignores adding the bytecode from the rlib, and if LTO is enabled
/// then the object file also isn't added.
pub fn add_rlib(&mut self, rlib: &Path, name: &str,
- lto: bool) -> old_io::IoResult<()> {
+ lto: bool) -> io::Result<()> {
// Ignoring obj file starting with the crate name
// as simple comparison is not enough - there
// might be also an extra name suffix
}
/// Adds an arbitrary file to this archive
- pub fn add_file(&mut self, file: &Path) -> old_io::IoResult<()> {
- let filename = Path::new(file.filename().unwrap());
+ pub fn add_file(&mut self, file: &Path) -> io::Result<()> {
+ let filename = Path::new(file.file_name().unwrap());
let new_file = self.work_dir.path().join(&filename);
try!(fs::copy(file, &new_file));
- self.members.push(filename);
+ self.members.push(filename.to_path_buf());
Ok(())
}
pub fn build(self) -> Archive<'a> {
// Get an absolute path to the destination, so `ar` will work even
// though we run it from `self.work_dir`.
- let abs_dst = os::getcwd().unwrap().join(&self.archive.dst);
+ let abs_dst = env::current_dir().unwrap().join(&self.archive.dst);
assert!(!abs_dst.is_relative());
- let mut args = vec![&abs_dst];
- let mut total_len = abs_dst.as_vec().len();
+ let mut args = vec![&*abs_dst];
+ let mut total_len = abs_dst.to_string_lossy().len();
if self.members.is_empty() {
// OSX `ar` does not allow using `r` with no members, but it does
const ARG_LENGTH_LIMIT: uint = 32_000;
for member_name in &self.members {
- let len = member_name.as_vec().len();
+ let len = member_name.to_string_lossy().len();
// `len + 1` to account for the space that's inserted before each
// argument. (Windows passes command-line arguments as a single
args.clear();
args.push(&abs_dst);
- total_len = abs_dst.as_vec().len();
+ total_len = abs_dst.to_string_lossy().len();
}
args.push(member_name);
}
fn add_archive<F>(&mut self, archive: &Path, name: &str,
- mut skip: F) -> old_io::IoResult<()>
+ mut skip: F) -> io::Result<()>
where F: FnMut(&str) -> bool,
{
let loc = TempDir::new("rsar").unwrap();
// First, extract the contents of the archive to a temporary directory.
// We don't unpack directly into `self.work_dir` due to the possibility
// of filename collisions.
- let archive = os::getcwd().unwrap().join(archive);
+ let archive = env::current_dir().unwrap().join(archive);
run_ar(self.archive.handler, &self.archive.maybe_ar_prog,
"x", Some(loc.path()), &[&archive]);
// We skip any files explicitly desired for skipping, and we also skip
// all SYMDEF files as these are just magical placeholders which get
// re-created when we make a new archive anyway.
- let files = try!(fs::readdir(loc.path()));
- for file in &files {
- let filename = file.filename_str().unwrap();
+ let files = try!(fs::read_dir(loc.path()));
+ for file in files {
+ let file = try!(file).path();
+ let filename = file.file_name().unwrap().to_str().unwrap();
if skip(filename) { continue }
if filename.contains(".SYMDEF") { continue }
filename
};
let new_filename = self.work_dir.path().join(&filename[..]);
- try!(fs::rename(file, &new_filename));
- self.members.push(Path::new(filename));
+ try!(fs::rename(&file, &new_filename));
+ self.members.push(PathBuf::new(&filename));
}
Ok(())
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use std::old_io;
+use std::io;
use std::old_io::fs;
+use std::old_io;
+use std::old_path;
use std::os;
+use std::path::{Path, PathBuf};
/// Returns an absolute path in the filesystem that `path` points to. The
/// returned path does not contain any symlinks in its hierarchy.
-pub fn realpath(original: &Path) -> old_io::IoResult<Path> {
- const MAX_LINKS_FOLLOWED: uint = 256;
+#[allow(deprecated)] // readlink is deprecated
+pub fn realpath(original: &Path) -> io::Result<PathBuf> {
+ let old = old_path::Path::new(original.to_str().unwrap());
+ match old_realpath(&old) {
+ Ok(p) => Ok(PathBuf::new(p.as_str().unwrap())),
+ Err(e) => Err(io::Error::new(io::ErrorKind::Other,
+ "realpath error",
+ Some(e.to_string())))
+ }
+}
+
+#[allow(deprecated)]
+fn old_realpath(original: &old_path::Path) -> old_io::IoResult<old_path::Path> {
+ const MAX_LINKS_FOLLOWED: usize = 256;
let original = try!(os::getcwd()).join(original);
// Right now lstat on windows doesn't work quite well
mod test {
use std::old_io;
use std::old_io::fs::{File, symlink, mkdir, mkdir_recursive};
- use super::realpath;
+ use super::old_realpath as realpath;
use std::old_io::TempDir;
#[test]
#![feature(box_syntax)]
#![feature(collections)]
#![feature(core)]
+#![feature(old_fs)]
+#![feature(fs)]
#![feature(hash)]
#![feature(int_uint)]
+#![feature(io)]
#![feature(old_io)]
-#![feature(os)]
#![feature(old_path)]
+#![feature(os)]
+#![feature(path)]
#![feature(rustc_private)]
#![feature(staged_api)]
-#![feature(path)]
+#![feature(tempdir)]
extern crate syntax;
extern crate serialize;
use std::collections::HashSet;
use std::env;
-use std::old_io::IoError;
-use std::os;
+use std::io;
+use std::path::{Path, PathBuf};
use syntax::ast;
-pub struct RPathConfig<F, G> where
- F: FnOnce() -> Path,
- G: FnMut(&Path) -> Result<Path, IoError>,
-{
- pub used_crates: Vec<(ast::CrateNum, Option<Path>)>,
- pub out_filename: Path,
+pub struct RPathConfig<'a> {
+ pub used_crates: Vec<(ast::CrateNum, Option<PathBuf>)>,
+ pub out_filename: PathBuf,
pub is_like_osx: bool,
pub has_rpath: bool,
- pub get_install_prefix_lib_path: F,
- pub realpath: G,
+ pub get_install_prefix_lib_path: &'a mut FnMut() -> PathBuf,
+ pub realpath: &'a mut FnMut(&Path) -> io::Result<PathBuf>,
}
-pub fn get_rpath_flags<F, G>(config: RPathConfig<F, G>) -> Vec<String> where
- F: FnOnce() -> Path,
- G: FnMut(&Path) -> Result<Path, IoError>,
-{
+pub fn get_rpath_flags(config: &mut RPathConfig) -> Vec<String> {
// No rpath on windows
if !config.has_rpath {
return Vec::new();
return ret;
}
-fn get_rpaths<F, G>(mut config: RPathConfig<F, G>, libs: &[Path]) -> Vec<String> where
- F: FnOnce() -> Path,
- G: FnMut(&Path) -> Result<Path, IoError>,
-{
+fn get_rpaths(config: &mut RPathConfig, libs: &[PathBuf]) -> Vec<String> {
debug!("output: {:?}", config.out_filename.display());
debug!("libs:");
for libpath in libs {
// Use relative paths to the libraries. Binaries can be moved
// as long as they maintain the relative relationship to the
// crates they depend on.
- let rel_rpaths = get_rpaths_relative_to_output(&mut config, libs);
+ let rel_rpaths = get_rpaths_relative_to_output(config, libs);
// And a final backup rpath to the global library location.
let fallback_rpaths = vec!(get_install_prefix_rpath(config));
return rpaths;
}
-fn get_rpaths_relative_to_output<F, G>(config: &mut RPathConfig<F, G>,
- libs: &[Path]) -> Vec<String> where
- F: FnOnce() -> Path,
- G: FnMut(&Path) -> Result<Path, IoError>,
-{
+fn get_rpaths_relative_to_output(config: &mut RPathConfig,
+ libs: &[PathBuf]) -> Vec<String> {
libs.iter().map(|a| get_rpath_relative_to_output(config, a)).collect()
}
-fn get_rpath_relative_to_output<F, G>(config: &mut RPathConfig<F, G>, lib: &Path) -> String where
- F: FnOnce() -> Path,
- G: FnMut(&Path) -> Result<Path, IoError>,
-{
+fn get_rpath_relative_to_output(config: &mut RPathConfig, lib: &Path) -> String {
// Mac doesn't appear to support $ORIGIN
let prefix = if config.is_like_osx {
"@loader_path"
"$ORIGIN"
};
- let cwd = os::getcwd().unwrap();
+ let cwd = env::current_dir().unwrap();
let mut lib = (config.realpath)(&cwd.join(lib)).unwrap();
lib.pop();
let mut output = (config.realpath)(&cwd.join(&config.out_filename)).unwrap();
output.pop();
- let relative = lib.path_relative_from(&output);
- let relative = relative.expect("could not create rpath relative to output");
+ let relative = relativize(&lib, &output);
// FIXME (#9639): This needs to handle non-utf8 paths
- format!("{}/{}",
- prefix,
- relative.as_str().expect("non-utf8 component in path"))
+ format!("{}/{}", prefix,
+ relative.to_str().expect("non-utf8 component in path"))
}
-fn get_install_prefix_rpath<F, G>(config: RPathConfig<F, G>) -> String where
- F: FnOnce() -> Path,
- G: FnMut(&Path) -> Result<Path, IoError>,
-{
+fn relativize(path: &Path, rel: &Path) -> PathBuf {
+ let mut res = PathBuf::new("");
+ let mut cur = rel;
+ while !path.starts_with(cur) {
+ res.push("..");
+ match cur.parent() {
+ Some(p) => cur = p,
+ None => panic!("can't create relative paths across filesystems"),
+ }
+ }
+ match path.relative_from(cur) {
+ Some(s) => { res.push(s); res }
+ None => panic!("couldn't create relative path from {:?} to {:?}",
+ rel, path),
+ }
+}
+
+fn get_install_prefix_rpath(config: &mut RPathConfig) -> String {
let path = (config.get_install_prefix_lib_path)();
let path = env::current_dir().unwrap().join(&path);
// FIXME (#9639): This needs to handle non-utf8 paths
mod test {
use super::{RPathConfig};
use super::{minimize_rpaths, rpaths_to_flags, get_rpath_relative_to_output};
+ use std::path::{Path, PathBuf};
#[test]
fn test_rpaths_to_flags() {
}
#[test]
- #[cfg(any(target_os = "linux", target_os = "android"))]
fn test_rpath_relative() {
- let config = &mut RPathConfig {
- used_crates: Vec::new(),
- out_filename: Path::new("bin/rustc"),
- get_install_prefix_lib_path: || panic!(),
- has_rpath: true,
- is_like_osx: false,
- realpath: |p| Ok(p.clone())
- };
- let res = get_rpath_relative_to_output(config, &Path::new("lib/libstd.so"));
- assert_eq!(res, "$ORIGIN/../lib");
- }
-
- #[test]
- #[cfg(any(target_os = "freebsd",
- target_os = "dragonfly",
- target_os = "bitrig",
- target_os = "openbsd"))]
- fn test_rpath_relative() {
- let config = &mut RPathConfig {
- used_crates: Vec::new(),
- has_rpath: true,
- is_like_osx: false,
- out_filename: Path::new("bin/rustc"),
- get_install_prefix_lib_path: || panic!(),
- realpath: |p| Ok(p.clone())
- };
- let res = get_rpath_relative_to_output(config, &Path::new("lib/libstd.so"));
- assert_eq!(res, "$ORIGIN/../lib");
- }
-
- #[test]
- #[cfg(target_os = "macos")]
- fn test_rpath_relative() {
- let config = &mut RPathConfig {
- used_crates: Vec::new(),
- has_rpath: true,
- is_like_osx: true,
- out_filename: Path::new("bin/rustc"),
- get_install_prefix_lib_path: || panic!(),
- realpath: |p| Ok(p.clone())
- };
- let res = get_rpath_relative_to_output(config, &Path::new("lib/libstd.so"));
- assert_eq!(res, "@loader_path/../lib");
+ if cfg!(target_os = "macos") {
+ let config = &mut RPathConfig {
+ used_crates: Vec::new(),
+ has_rpath: true,
+ is_like_osx: true,
+ out_filename: PathBuf::new("bin/rustc"),
+ get_install_prefix_lib_path: &mut || panic!(),
+ realpath: &mut |p| Ok(p.to_path_buf()),
+ };
+ let res = get_rpath_relative_to_output(config,
+ Path::new("lib/libstd.so"));
+ assert_eq!(res, "@loader_path/../lib");
+ } else {
+ let config = &mut RPathConfig {
+ used_crates: Vec::new(),
+ out_filename: PathBuf::new("bin/rustc"),
+ get_install_prefix_lib_path: &mut || panic!(),
+ has_rpath: true,
+ is_like_osx: false,
+ realpath: &mut |p| Ok(p.to_path_buf()),
+ };
+ let res = get_rpath_relative_to_output(config,
+ Path::new("lib/libstd.so"));
+ assert_eq!(res, "$ORIGIN/../lib");
+ }
}
}
/// Create a new FixedBuffer64
fn new() -> FixedBuffer64 {
return FixedBuffer64 {
- buffer: [0u8; 64],
+ buffer: [0; 64],
buffer_idx: 0
};
}
/// Convenience function that retrieves the result of a digest as a
/// newly allocated vec of bytes.
fn result_bytes(&mut self) -> Vec<u8> {
- let mut buf: Vec<u8> = repeat(0u8).take((self.output_bits()+7)/8).collect();
+ let mut buf: Vec<u8> = repeat(0).take((self.output_bits()+7)/8).collect();
self.result(&mut buf);
buf
}
let mut g = self.h6;
let mut h = self.h7;
- let mut w = [0u32; 64];
+ let mut w = [0; 64];
// Sha-512 and Sha-256 use basically the same calculations which are implemented
// by these macros. Inlining the calculations seems to result in better generated code.
#[bench]
pub fn sha256_10(b: &mut Bencher) {
let mut sh = Sha256::new();
- let bytes = [1u8; 10];
+ let bytes = [1; 10];
b.iter(|| {
sh.input(&bytes);
});
#[bench]
pub fn sha256_1k(b: &mut Bencher) {
let mut sh = Sha256::new();
- let bytes = [1u8; 1024];
+ let bytes = [1; 1024];
b.iter(|| {
sh.input(&bytes);
});
#[bench]
pub fn sha256_64k(b: &mut Bencher) {
let mut sh = Sha256::new();
- let bytes = [1u8; 65536];
+ let bytes = [1; 65536];
b.iter(|| {
sh.input(&bytes);
});
use serialize::json::Json;
use syntax::{diagnostic, abi};
use std::default::Default;
-use std::old_io::fs::PathExtensions;
+use std::io::prelude::*;
mod windows_base;
mod linux_base;
base
}
- /// Search RUST_TARGET_PATH for a JSON file specifying the given target triple. Note that it
- /// could also just be a bare filename already, so also check for that. If one of the hardcoded
- /// targets we know about, just return it directly.
+ /// Search RUST_TARGET_PATH for a JSON file specifying the given target
+ /// triple. Note that it could also just be a bare filename already, so also
+ /// check for that. If it's one of the hardcoded targets we know about, just
+ /// return it directly.
///
- /// The error string could come from any of the APIs called, including filesystem access and
- /// JSON decoding.
+ /// The error string could come from any of the APIs called, including
+ /// filesystem access and JSON decoding.
pub fn search(target: &str) -> Result<Target, String> {
use std::env;
- use std::os;
use std::ffi::OsString;
- use std::old_io::File;
- use std::old_path::Path;
+ use std::fs::File;
+ use std::path::{Path, PathBuf};
use serialize::json;
fn load_file(path: &Path) -> Result<Target, String> {
- let mut f = try!(File::open(path).map_err(|e| format!("{:?}", e)));
- let obj = try!(json::from_reader(&mut f).map_err(|e| format!("{:?}", e)));
+ let mut f = try!(File::open(path).map_err(|e| e.to_string()));
+ let mut contents = Vec::new();
+ try!(f.read_to_end(&mut contents).map_err(|e| e.to_string()));
+ let obj = try!(json::from_reader(&mut &contents[..])
+ .map_err(|e| e.to_string()));
Ok(Target::from_json(obj))
}
let path = {
let mut target = target.to_string();
target.push_str(".json");
- Path::new(target)
+ PathBuf::new(&target)
};
- let target_path = env::var_os("RUST_TARGET_PATH").unwrap_or(OsString::from_str(""));
+ let target_path = env::var_os("RUST_TARGET_PATH")
+ .unwrap_or(OsString::from_str(""));
// FIXME 16351: add a sane default search path?
- for dir in os::split_paths(target_path.to_str().unwrap()).iter() {
- let p = dir.join(path.clone());
+ for dir in env::split_paths(&target_path) {
+ let p = dir.join(&path);
if p.is_file() {
return load_file(&p);
}
bitflags! {
flags AnotherSetOfFlags: i8 {
- const AnotherFlag = -1_i8,
+ const AnotherFlag = -1,
}
}
assert_eq!(FlagABC.bits(), 0b00000111);
assert_eq!(AnotherSetOfFlags::empty().bits(), 0b00);
- assert_eq!(AnotherFlag.bits(), !0_i8);
+ assert_eq!(AnotherFlag.bits(), !0);
}
#[test]
assert!(Flags::from_bits(0b11) == Some(FlagA | FlagB));
assert!(Flags::from_bits(0b1000) == None);
- assert!(AnotherSetOfFlags::from_bits(!0_i8) == Some(AnotherFlag));
+ assert!(AnotherSetOfFlags::from_bits(!0) == Some(AnotherFlag));
}
#[test]
assert!(Flags::from_bits_truncate(0b1000) == Flags::empty());
assert!(Flags::from_bits_truncate(0b1001) == FlagA);
- assert!(AnotherSetOfFlags::from_bits_truncate(0_i8) == AnotherSetOfFlags::empty());
+ assert!(AnotherSetOfFlags::from_bits_truncate(0) == AnotherSetOfFlags::empty());
}
#[test]
bccx.span_note(
move_to_span,
"attempting to move value to here");
- bccx.span_help(
+ bccx.fileline_help(
move_to_span,
&format!("to prevent the move, \
use `ref {0}` or `ref mut {0}` to capture value by \
ol,
moved_lp_msg,
pat_ty.user_string(self.tcx)));
- self.tcx.sess.span_help(span,
+ self.tcx.sess.fileline_help(span,
"use `ref` to override");
}
moved_lp_msg,
expr_ty.user_string(self.tcx),
suggestion));
- self.tcx.sess.span_help(expr_span, help);
+ self.tcx.sess.fileline_help(expr_span, help);
}
}
self.tcx.sess.span_help(s, m);
}
+ pub fn fileline_help(&self, s: Span, m: &str) {
+ self.tcx.sess.fileline_help(s, m);
+ }
+
pub fn bckerr_to_string(&self, err: &BckError<'tcx>) -> String {
match err.code {
err_mutbl => {
}
if is_closure {
- self.tcx.sess.span_help(
+ self.tcx.sess.fileline_help(
span,
"closures behind references must be called via `&mut`");
}
use serialize::json;
use std::env;
-use std::os;
use std::ffi::OsString;
-use std::old_io::fs;
-use std::old_io;
+use std::fs;
+use std::io::{self, Write};
+use std::path::{Path, PathBuf};
use syntax::ast;
use syntax::ast_map;
use syntax::attr;
pub fn compile_input(sess: Session,
cfg: ast::CrateConfig,
input: &Input,
- outdir: &Option<Path>,
- output: &Option<Path>,
+ outdir: &Option<PathBuf>,
+ output: &Option<PathBuf>,
addl_plugins: Option<Vec<String>>,
control: CompileController) {
macro_rules! controller_entry_point{($point: ident, $make_state: expr) => ({
pub fn source_name(input: &Input) -> String {
match *input {
// FIXME (#9639): This needs to handle non-utf8 paths
- Input::File(ref ifile) => ifile.as_str().unwrap().to_string(),
+ Input::File(ref ifile) => ifile.to_str().unwrap().to_string(),
Input::Str(_) => anon_src()
}
}
impl<'a, 'ast, 'tcx> CompileState<'a, 'ast, 'tcx> {
fn empty(input: &'a Input,
session: &'a Session,
- out_dir: &'a Option<Path>)
+ out_dir: &'a Option<PathBuf>)
-> CompileState<'a, 'ast, 'tcx> {
CompileState {
input: input,
session: session,
- out_dir: out_dir.as_ref(),
+ out_dir: out_dir.as_ref().map(|s| &**s),
cfg: None,
krate: None,
crate_name: None,
fn state_after_parse(input: &'a Input,
session: &'a Session,
- out_dir: &'a Option<Path>,
+ out_dir: &'a Option<PathBuf>,
krate: &'a ast::Crate)
-> CompileState<'a, 'ast, 'tcx> {
CompileState {
fn state_after_expand(input: &'a Input,
session: &'a Session,
- out_dir: &'a Option<Path>,
+ out_dir: &'a Option<PathBuf>,
expanded_crate: &'a ast::Crate,
crate_name: &'a str)
-> CompileState<'a, 'ast, 'tcx> {
fn state_after_write_deps(input: &'a Input,
session: &'a Session,
- out_dir: &'a Option<Path>,
+ out_dir: &'a Option<PathBuf>,
ast_map: &'a ast_map::Map<'ast>,
expanded_crate: &'a ast::Crate,
crate_name: &'a str)
fn state_after_analysis(input: &'a Input,
session: &'a Session,
- out_dir: &'a Option<Path>,
+ out_dir: &'a Option<PathBuf>,
expanded_crate: &'a ast::Crate,
analysis: &'a ty::CrateAnalysis<'tcx>,
tcx: &'a ty::ctxt<'tcx>)
fn state_after_llvm(input: &'a Input,
session: &'a Session,
- out_dir: &'a Option<Path>,
+ out_dir: &'a Option<PathBuf>,
trans: &'a trans::CrateTranslation)
-> CompileState<'a, 'ast, 'tcx> {
CompileState {
if cfg!(windows) {
_old_path = env::var_os("PATH").unwrap_or(_old_path);
let mut new_path = sess.host_filesearch(PathKind::All).get_dylib_search_paths();
- new_path.extend(os::split_paths(_old_path.to_str().unwrap()).into_iter());
+ new_path.extend(env::split_paths(&_old_path));
env::set_var("PATH", &env::join_paths(new_path.iter()).unwrap());
}
let features = sess.features.borrow();
}
);
- // Needs to go *after* expansion to be able to check the results of macro expansion.
- time(time_passes, "complete gated feature checking", (), |_| {
+ // Needs to go *after* expansion to be able to check the results
+ // of macro expansion. This runs before #[cfg] to try to catch as
+ // much as possible (e.g. help the programmer avoid platform
+ // specific differences)
+ time(time_passes, "complete gated feature checking 1", (), |_| {
let features =
syntax::feature_gate::check_crate(sess.codemap(),
- &sess.parse_sess.span_diagnostic,
- &krate);
+ &sess.parse_sess.span_diagnostic,
+ &krate,
+ true);
*sess.features.borrow_mut() = features;
sess.abort_if_errors();
});
time(time_passes, "checking that all macro invocations are gone", &krate, |krate|
syntax::ext::expand::check_for_macros(&sess.parse_sess, krate));
+ // One final feature gating of the true AST that gets compiled
+ // later, to make sure we've got everything (e.g. configuration
+ // can insert new attributes via `cfg_attr`)
+ time(time_passes, "complete gated feature checking 2", (), |_| {
+ let features =
+ syntax::feature_gate::check_crate(sess.codemap(),
+ &sess.parse_sess.span_diagnostic,
+ &krate,
+ false);
+ *sess.features.borrow_mut() = features;
+ sess.abort_if_errors();
+ });
+
Some(krate)
}
// Remove assembly source, unless --save-temps was specified
if !sess.opts.cg.save_temps {
- fs::unlink(&outputs.temp_path(config::OutputTypeAssembly)).unwrap();
+ fs::remove_file(&outputs.temp_path(config::OutputTypeAssembly)).unwrap();
}
} else {
time(sess.time_passes(), "LLVM passes", (), |_|
outputs: &OutputFilenames) {
let old_path = env::var_os("PATH").unwrap_or(OsString::from_str(""));
let mut new_path = sess.host_filesearch(PathKind::All).get_tools_search_paths();
- new_path.extend(os::split_paths(old_path.to_str().unwrap()).into_iter());
+ new_path.extend(env::split_paths(&old_path));
env::set_var("PATH", &env::join_paths(new_path.iter()).unwrap());
time(sess.time_passes(), "linking", (), |_|
_ => return,
};
- let result = (|| -> old_io::IoResult<()> {
+ let result = (|| -> io::Result<()> {
// Build a list of files used to compile the output and
// write Makefile-compatible dependency rules
let files: Vec<String> = sess.codemap().files.borrow()
.iter().filter(|fmap| fmap.is_real_file())
.map(|fmap| escape_dep_filename(&fmap.name))
.collect();
- let mut file = try!(old_io::File::create(&deps_filename));
+ let mut file = try!(fs::File::create(&deps_filename));
for path in &out_filenames {
- try!(write!(&mut file as &mut Writer,
- "{}: {}\n\n", path.display(), files.connect(" ")));
+ try!(write!(&mut file,
+ "{}: {}\n\n", path.display(), files.connect(" ")));
}
Ok(())
})();
}
pub fn build_output_filenames(input: &Input,
- odir: &Option<Path>,
- ofile: &Option<Path>,
+ odir: &Option<PathBuf>,
+ ofile: &Option<PathBuf>,
attrs: &[ast::Attribute],
sess: &Session)
-> OutputFilenames {
// We want to toss everything after the final '.'
let dirpath = match *odir {
Some(ref d) => d.clone(),
- None => Path::new(".")
+ None => PathBuf::new(".")
};
// If a crate name is present, we use it as the link name
sess.warn("ignoring --out-dir flag due to -o flag.");
}
OutputFilenames {
- out_directory: out_file.dir_path(),
- out_filestem: out_file.filestem_str().unwrap().to_string(),
+ out_directory: out_file.parent().unwrap().to_path_buf(),
+ out_filestem: out_file.file_stem().unwrap()
+ .to_str().unwrap().to_string(),
single_output_file: ofile,
extra: sess.opts.cg.extra_filename.clone(),
}
#![feature(old_io)]
#![feature(libc)]
#![feature(os)]
-#![feature(old_path)]
#![feature(quote)]
#![feature(rustc_diagnostic_macros)]
#![feature(rustc_private)]
#![feature(staged_api)]
#![feature(unicode)]
#![feature(exit_status)]
+#![feature(path)]
+#![feature(io)]
+#![feature(fs)]
extern crate arena;
extern crate flate;
use rustc::util::common::time;
use std::cmp::Ordering::Equal;
-use std::old_io::{self, stdio};
-use std::iter::repeat;
use std::env;
+use std::iter::repeat;
+use std::old_io::{self, stdio};
+use std::path::PathBuf;
use std::sync::mpsc::channel;
use std::thread;
}
// Extract output directory and file from matches.
-fn make_output(matches: &getopts::Matches) -> (Option<Path>, Option<Path>) {
- let odir = matches.opt_str("out-dir").map(|o| Path::new(o));
- let ofile = matches.opt_str("o").map(|o| Path::new(o));
+fn make_output(matches: &getopts::Matches) -> (Option<PathBuf>, Option<PathBuf>) {
+ let odir = matches.opt_str("out-dir").map(|o| PathBuf::new(&o));
+ let ofile = matches.opt_str("o").map(|o| PathBuf::new(&o));
(odir, ofile)
}
// Extract input (string or file and optional path) from matches.
-fn make_input(free_matches: &[String]) -> Option<(Input, Option<Path>)> {
+fn make_input(free_matches: &[String]) -> Option<(Input, Option<PathBuf>)> {
if free_matches.len() == 1 {
let ifile = &free_matches[0][..];
if ifile == "-" {
let src = String::from_utf8(contents).unwrap();
Some((Input::Str(src), None))
} else {
- Some((Input::File(Path::new(ifile)), Some(Path::new(ifile))))
+ Some((Input::File(PathBuf::new(ifile)), Some(PathBuf::new(ifile))))
}
} else {
None
&getopts::Matches,
&Session,
&Input,
- &Option<Path>,
- &Option<Path>)
+ &Option<PathBuf>,
+ &Option<PathBuf>)
-> Compilation;
// Called after we extract the input from the arguments. Gives the implementer
// an opportunity to change the inputs or to add some custom input handling.
// The default behaviour is to simply pass through the inputs.
- fn some_input(&mut self, input: Input, input_path: Option<Path>) -> (Input, Option<Path>) {
+ fn some_input(&mut self, input: Input, input_path: Option<PathBuf>)
+ -> (Input, Option<PathBuf>) {
(input, input_path)
}
fn no_input(&mut self,
&getopts::Matches,
&config::Options,
- &Option<Path>,
- &Option<Path>,
+ &Option<PathBuf>,
+ &Option<PathBuf>,
&diagnostics::registry::Registry)
- -> Option<(Input, Option<Path>)>;
+ -> Option<(Input, Option<PathBuf>)>;
// Parse pretty printing information from the arguments. The implementer can
// choose to ignore this (the default will return None) which will skip pretty
fn no_input(&mut self,
matches: &getopts::Matches,
sopts: &config::Options,
- odir: &Option<Path>,
- ofile: &Option<Path>,
+ odir: &Option<PathBuf>,
+ ofile: &Option<PathBuf>,
descriptions: &diagnostics::registry::Registry)
- -> Option<(Input, Option<Path>)> {
+ -> Option<(Input, Option<PathBuf>)> {
match matches.free.len() {
0 => {
if sopts.describe_lints {
matches: &getopts::Matches,
sess: &Session,
input: &Input,
- odir: &Option<Path>,
- ofile: &Option<Path>)
+ odir: &Option<PathBuf>,
+ ofile: &Option<PathBuf>)
-> Compilation {
RustcDefaultCalls::print_crate_info(sess, Some(input), odir, ofile).and_then(
|| RustcDefaultCalls::list_metadata(sess, matches, input))
if r.contains(&("ls".to_string())) {
match input {
&Input::File(ref ifile) => {
- let mut stdout = old_io::stdout();
let path = &(*ifile);
+ let mut v = Vec::new();
metadata::loader::list_file_metadata(sess.target.target.options.is_like_osx,
path,
- &mut stdout).unwrap();
+ &mut v).unwrap();
+ println!("{}", String::from_utf8(v).unwrap());
}
&Input::Str(_) => {
early_error("cannot list metadata for stdin");
fn print_crate_info(sess: &Session,
input: Option<&Input>,
- odir: &Option<Path>,
- ofile: &Option<Path>)
+ odir: &Option<PathBuf>,
+ ofile: &Option<PathBuf>)
-> Compilation {
if sess.opts.prints.len() == 0 {
return Compilation::Continue;
style,
&id,
&t_outputs.with_extension(""));
- println!("{}", fname.filename_display());
+ println!("{}", fname.file_name().unwrap()
+ .to_string_lossy());
}
}
}
///
/// The diagnostic emitter yielded to the procedure should be used for reporting
/// errors of the compiler.
+#[allow(deprecated)]
pub fn monitor<F:FnOnce()+Send+'static>(f: F) {
const STACK_SIZE: uint = 8 * 1024 * 1024; // 8MB
use graphviz as dot;
-use std::old_io::{self, MemReader};
+use std::fs::File;
+use std::io::{self, Write};
+use std::old_io;
use std::option;
+use std::path::PathBuf;
use std::str::FromStr;
#[derive(Copy, PartialEq, Debug)]
impl<'ast> pprust::PpAnn for IdentifiedAnnotation<'ast> {
fn pre(&self,
s: &mut pprust::State,
- node: pprust::AnnNode) -> old_io::IoResult<()> {
+ node: pprust::AnnNode) -> io::Result<()> {
match node {
pprust::NodeExpr(_) => s.popen(),
_ => Ok(())
}
fn post(&self,
s: &mut pprust::State,
- node: pprust::AnnNode) -> old_io::IoResult<()> {
+ node: pprust::AnnNode) -> io::Result<()> {
match node {
pprust::NodeIdent(_) | pprust::NodeName(_) => Ok(()),
impl<'ast> pprust::PpAnn for HygieneAnnotation<'ast> {
fn post(&self,
s: &mut pprust::State,
- node: pprust::AnnNode) -> old_io::IoResult<()> {
+ node: pprust::AnnNode) -> io::Result<()> {
match node {
pprust::NodeIdent(&ast::Ident { name: ast::Name(nm), ctxt }) => {
try!(pp::space(&mut s.s));
impl<'tcx> pprust::PpAnn for TypedAnnotation<'tcx> {
fn pre(&self,
s: &mut pprust::State,
- node: pprust::AnnNode) -> old_io::IoResult<()> {
+ node: pprust::AnnNode) -> io::Result<()> {
match node {
pprust::NodeExpr(_) => s.popen(),
_ => Ok(())
}
fn post(&self,
s: &mut pprust::State,
- node: pprust::AnnNode) -> old_io::IoResult<()> {
+ node: pprust::AnnNode) -> io::Result<()> {
let tcx = &self.analysis.ty_cx;
match node {
pprust::NodeExpr(expr) => {
input: &Input,
ppm: PpMode,
opt_uii: Option<UserIdentifiedItem>,
- ofile: Option<Path>) {
+ ofile: Option<PathBuf>) {
let krate = driver::phase_1_parse_input(&sess, cfg, input);
let krate = if let PpmSource(PpmEveryBodyLoops) = ppm {
let src_name = driver::source_name(input);
let src = sess.codemap().get_filemap(&src_name[..])
- .src.as_bytes().to_vec();
- let mut rdr = MemReader::new(src);
+ .src
+ .as_ref()
+ .unwrap()
+ .as_bytes()
+ .to_vec();
+ let mut rdr = &src[..];
- let out = match ofile {
- None => box old_io::stdout() as Box<Writer+'static>,
- Some(p) => {
- let r = old_io::File::create(&p);
- match r {
- Ok(w) => box w as Box<Writer+'static>,
- Err(e) => panic!("print-print failed to open {} due to {}",
- p.display(), e),
- }
- }
- };
+ let mut out = Vec::new();
match (ppm, opt_uii) {
- (PpmSource(s), None) =>
+ (PpmSource(s), None) => {
+ let out: &mut Write = &mut out;
s.call_with_pp_support(
- sess, ast_map, &arenas, id, out, |annotation, out| {
+ sess, ast_map, &arenas, id, box out, |annotation, out| {
debug!("pretty printing source code {:?}", s);
let sess = annotation.sess();
pprust::print_crate(sess.codemap(),
out,
annotation.pp_ann(),
is_expanded)
- }),
+ })
+ }
- (PpmSource(s), Some(uii)) =>
+ (PpmSource(s), Some(uii)) => {
+ let out: &mut Write = &mut out;
s.call_with_pp_support(
sess, ast_map, &arenas, id, (out,uii), |annotation, (out,uii)| {
debug!("pretty printing source code {:?}", s);
sess.diagnostic(),
src_name.to_string(),
&mut rdr,
- out,
+ box out,
annotation.pp_ann(),
is_expanded);
for node_id in uii.all_matching_node_ids(ast_map) {
try!(pp::hardbreak(&mut pp_state.s));
}
pp::eof(&mut pp_state.s)
- }),
+ })
+ }
(PpmFlowGraph(mode), opt_uii) => {
debug!("pretty printing flow graph for {:?}", opt_uii);
});
let code = blocks::Code::from_node(node);
+ let out: &mut Writer = &mut out;
match code {
Some(code) => {
let variants = gather_flowgraph_variants(&sess);
}
}
}
- }.unwrap()
+ }.unwrap();
+
+ match ofile {
+ None => print!("{}", String::from_utf8(out).unwrap()),
+ Some(p) => {
+ match File::create(&p) {
+ Ok(mut w) => w.write_all(&out).unwrap(),
+ Err(e) => panic!("pretty-print failed to open {} due to {}",
+ p.display(), e),
+ }
+ }
+ }
}
fn print_flowgraph<W:old_io::Writer>(variants: Vec<borrowck_dot::Variant>,
analysis: ty::CrateAnalysis,
code: blocks::Code,
mode: PpFlowGraphMode,
- mut out: W) -> old_io::IoResult<()> {
+ mut out: W) -> io::Result<()> {
let ty_cx = &analysis.ty_cx;
let cfg = match code {
blocks::BlockCode(block) => cfg::CFG::new(ty_cx, &*block),
}
}
- fn expand_err_details(r: old_io::IoResult<()>) -> old_io::IoResult<()> {
+ fn expand_err_details(r: old_io::IoResult<()>) -> io::Result<()> {
r.map_err(|ioerr| {
- let orig_detail = ioerr.detail.clone();
- let m = "graphviz::render failed";
- old_io::IoError {
- detail: Some(match orig_detail {
- None => m.to_string(),
- Some(d) => format!("{}: {}", m, d)
- }),
- ..ioerr
- }
+ io::Error::new(io::ErrorKind::Other, "graphviz::render failed",
+ Some(ioerr.to_string()))
})
}
}
}
def::DefTy(..) => {
let tty = match self.cx.tcx.ast_ty_to_ty_cache.borrow().get(&id) {
- Some(&ty::atttce_resolved(t)) => t,
- _ => panic!("ast_ty_to_ty_cache was incomplete after typeck!")
+ Some(&t) => t,
+ None => panic!("ast_ty_to_ty_cache was incomplete after typeck!")
};
if !ty::is_ffi_safe(self.cx.tcx, tty) {
for call in &self_call_spans {
sess.span_note(*call, "recursive call site")
}
- sess.span_help(sp, "a `loop` may express intention better if this is on purpose")
+ sess.fileline_help(sp, "a `loop` may express intention \
+ better if this is on purpose")
}
}
use std::ffi::CString;
use std::mem;
use std::raw;
+use std::path::Path;
pub struct ArchiveRO {
ptr: ArchiveRef,
/// If this archive is used with a mutable method, then an error will be
/// raised.
pub fn open(dst: &Path) -> Option<ArchiveRO> {
- unsafe {
- let s = CString::new(dst.as_vec()).unwrap();
+ return unsafe {
+ let s = path2cstr(dst);
let ar = ::LLVMRustOpenArchive(s.as_ptr());
if ar.is_null() {
None
} else {
Some(ArchiveRO { ptr: ar })
}
+ };
+
+ #[cfg(unix)]
+ fn path2cstr(p: &Path) -> CString {
+ use std::os::unix::prelude::*;
+ use std::ffi::AsOsStr;
+ CString::new(p.as_os_str().as_bytes()).unwrap()
+ }
+ #[cfg(windows)]
+ fn path2cstr(p: &Path) -> CString {
+ CString::new(p.to_str().unwrap()).unwrap()
}
}
#![feature(int_uint)]
#![feature(libc)]
#![feature(link_args)]
-#![feature(old_path)]
#![feature(staged_api)]
#![feature(std_misc)]
+#![feature(path)]
extern crate libc;
#[macro_use] #[no_link] extern crate rustc_bitflags;
uses it like a function name",
path_name));
- self.session.span_help(expr.span,
- &format!("Did you mean to write: \
- `{} {{ /* fields */ }}`?",
- path_name));
+ let msg = format!("Did you mean to write: \
+ `{} {{ /* fields */ }}`?",
+ path_name);
+ if self.emit_errors {
+ self.session.fileline_help(expr.span, &msg);
+ } else {
+ self.session.span_help(expr.span, &msg);
+ }
} else {
// Write the result into the def map.
debug!("(resolving expr) resolved `{}`",
match type_res.map(|r| r.base_def) {
Some(DefTy(struct_id, _))
if self.structs.contains_key(&struct_id) => {
- self.resolve_error(expr.span,
+ self.resolve_error(expr.span,
&format!("`{}` is a structure name, but \
this expression \
uses it like a function name",
path_name));
- self.session.span_help(expr.span,
- &format!("Did you mean to write: \
- `{} {{ /* fields */ }}`?",
- path_name));
-
- }
+ let msg = format!("Did you mean to write: \
+ `{} {{ /* fields */ }}`?",
+ path_name);
+ if self.emit_errors {
+ self.session.fileline_help(expr.span, &msg);
+ } else {
+ self.session.span_help(expr.span, &msg);
+ }
+ }
_ => {
// Keep reporting some errors even if they're ignored above.
self.resolve_path(expr.id, path, 0, ValueNS, true);
use util::ppaux;
use util::sha2::{Digest, Sha256};
-use std::old_io::fs::PathExtensions;
-use std::old_io::{fs, TempDir, Command};
-use std::old_io;
+use std::ffi::{AsOsStr, OsString};
+use std::fs::{self, TempDir, PathExt};
+use std::io::{self, Read, Write};
use std::mem;
+use std::path::{Path, PathBuf};
+use std::process::Command;
use std::str;
-use std::string::String;
use flate;
use serialize::hex::ToHex;
use syntax::ast;
return validate(s.to_string(), Some(attr.span));
}
if let Input::File(ref path) = *input {
- if let Some(s) = path.filestem_str() {
+ if let Some(s) = path.file_stem().and_then(|s| s.to_str()) {
return validate(s.to_string(), None);
}
}
}
pub fn remove(sess: &Session, path: &Path) {
- match fs::unlink(path) {
+ match fs::remove_file(path) {
Ok(..) => {}
Err(e) => {
sess.err(&format!("failed to remove {}: {}",
pub fn link_binary(sess: &Session,
trans: &CrateTranslation,
outputs: &OutputFilenames,
- crate_name: &str) -> Vec<Path> {
+ crate_name: &str) -> Vec<PathBuf> {
let mut out_filenames = Vec::new();
for &crate_type in &*sess.crate_types.borrow() {
if invalid_output_for_target(sess, crate_type) {
}
fn is_writeable(p: &Path) -> bool {
- match p.stat() {
+ match p.metadata() {
Err(..) => true,
- Ok(m) => m.perm & old_io::USER_WRITE == old_io::USER_WRITE
+ Ok(m) => !m.permissions().readonly()
}
}
pub fn filename_for_input(sess: &Session,
crate_type: config::CrateType,
name: &str,
- out_filename: &Path) -> Path {
+ out_filename: &Path) -> PathBuf {
let libname = format!("{}{}", name, sess.opts.cg.extra_filename);
match crate_type {
config::CrateTypeRlib => {
- out_filename.with_filename(format!("lib{}.rlib", libname))
+ out_filename.with_file_name(&format!("lib{}.rlib", libname))
}
config::CrateTypeDylib => {
let (prefix, suffix) = (&sess.target.target.options.dll_prefix,
&sess.target.target.options.dll_suffix);
- out_filename.with_filename(format!("{}{}{}",
- prefix,
- libname,
- suffix))
+ out_filename.with_file_name(&format!("{}{}{}",
+ prefix,
+ libname,
+ suffix))
}
config::CrateTypeStaticlib => {
- out_filename.with_filename(format!("lib{}.a", libname))
+ out_filename.with_file_name(&format!("lib{}.a", libname))
}
config::CrateTypeExecutable => {
let suffix = &sess.target.target.options.exe_suffix;
- out_filename.with_filename(format!("{}{}", libname, suffix))
+ out_filename.with_file_name(&format!("{}{}", libname, suffix))
}
}
}
trans: &CrateTranslation,
crate_type: config::CrateType,
outputs: &OutputFilenames,
- crate_name: &str) -> Path {
+ crate_name: &str) -> PathBuf {
let obj_filename = outputs.temp_path(OutputTypeObject);
let out_filename = match outputs.single_output_file {
Some(ref file) => file.clone(),
out_filename
}
-fn archive_search_paths(sess: &Session) -> Vec<Path> {
+fn archive_search_paths(sess: &Session) -> Vec<PathBuf> {
let mut search = Vec::new();
sess.target_filesearch(PathKind::Native).for_each_lib_search_path(|path, _| {
- search.push(path.clone());
+ search.push(path.to_path_buf());
FileDoesntMatch
});
return search;
let handler = &sess.diagnostic().handler;
let config = ArchiveConfig {
handler: handler,
- dst: out_filename.clone(),
+ dst: out_filename.to_path_buf(),
lib_search_paths: archive_search_paths(sess),
slib_prefix: sess.target.target.options.staticlib_prefix.clone(),
slib_suffix: sess.target.target.options.staticlib_suffix.clone(),
// the same filename for metadata (stomping over one another)
let tmpdir = TempDir::new("rustc").ok().expect("needs a temp dir");
let metadata = tmpdir.path().join(METADATA_FILENAME);
- match fs::File::create(&metadata).write_all(&trans.metadata) {
+ match fs::File::create(&metadata).and_then(|mut f| {
+ f.write_all(&trans.metadata)
+ }) {
Ok(..) => {}
Err(e) => {
sess.err(&format!("failed to write {}: {}",
let bc_deflated_filename = obj_filename.with_extension(
&format!("{}.bytecode.deflate", i));
- let bc_data = match fs::File::open(&bc_filename).read_to_end() {
- Ok(buffer) => buffer,
+ let mut bc_data = Vec::new();
+ match fs::File::open(&bc_filename).and_then(|mut f| {
+ f.read_to_end(&mut bc_data)
+ }) {
+ Ok(..) => {}
Err(e) => sess.fatal(&format!("failed to read bytecode: {}",
e))
- };
+ }
let bc_data_deflated = match flate::deflate_bytes(&bc_data[..]) {
Some(compressed) => compressed,
- None => sess.fatal(&format!("failed to compress bytecode from {}",
- bc_filename.display()))
+ None => sess.fatal(&format!("failed to compress bytecode \
+ from {}",
+ bc_filename.display()))
};
let mut bc_file_deflated = match fs::File::create(&bc_deflated_filename) {
Ok(file) => file,
Err(e) => {
- sess.fatal(&format!("failed to create compressed bytecode \
- file: {}", e))
+ sess.fatal(&format!("failed to create compressed \
+ bytecode file: {}", e))
}
};
match write_rlib_bytecode_object_v1(&mut bc_file_deflated,
- bc_data_deflated.as_slice()) {
+ &bc_data_deflated) {
Ok(()) => {}
Err(e) => {
sess.err(&format!("failed to write compressed bytecode: \
ab
}
-fn write_rlib_bytecode_object_v1<T: Writer>(writer: &mut T,
- bc_data_deflated: &[u8])
- -> ::std::old_io::IoResult<()> {
+fn write_rlib_bytecode_object_v1(writer: &mut Write,
+ bc_data_deflated: &[u8]) -> io::Result<()> {
let bc_data_deflated_size: u64 = bc_data_deflated.len() as u64;
- try! { writer.write_all(RLIB_BYTECODE_OBJECT_MAGIC) };
- try! { writer.write_le_u32(1) };
- try! { writer.write_le_u64(bc_data_deflated_size) };
- try! { writer.write_all(&bc_data_deflated[..]) };
+ try!(writer.write_all(RLIB_BYTECODE_OBJECT_MAGIC));
+ try!(writer.write_all(&[1, 0, 0, 0]));
+ try!(writer.write_all(&[
+ (bc_data_deflated_size >> 0) as u8,
+ (bc_data_deflated_size >> 8) as u8,
+ (bc_data_deflated_size >> 16) as u8,
+ (bc_data_deflated_size >> 24) as u8,
+ (bc_data_deflated_size >> 32) as u8,
+ (bc_data_deflated_size >> 40) as u8,
+ (bc_data_deflated_size >> 48) as u8,
+ (bc_data_deflated_size >> 56) as u8,
+ ]));
+ try!(writer.write_all(&bc_data_deflated));
let number_of_bytes_written_so_far =
RLIB_BYTECODE_OBJECT_MAGIC.len() + // magic id
// padding byte to make it even. This works around a crash bug in LLDB
// (see issue #15950)
if number_of_bytes_written_so_far % 2 == 1 {
- try! { writer.write_u8(0) };
+ try!(writer.write_all(&[0]));
}
return Ok(());
pname,
prog.status));
sess.note(&format!("{:?}", &cmd));
- let mut output = prog.error.clone();
- output.push_all(&prog.output);
+ let mut output = prog.stderr.clone();
+ output.push_all(&prog.stdout);
sess.note(str::from_utf8(&output[..]).unwrap());
sess.abort_if_errors();
}
- debug!("linker stderr:\n{}", String::from_utf8(prog.error).unwrap());
- debug!("linker stdout:\n{}", String::from_utf8(prog.output).unwrap());
+ debug!("linker stderr:\n{}", String::from_utf8(prog.stderr).unwrap());
+ debug!("linker stdout:\n{}", String::from_utf8(prog.stdout).unwrap());
},
Err(e) => {
sess.err(&format!("could not exec the linker `{}`: {}",
if t.options.is_like_osx {
let morestack = lib_path.join("libmorestack.a");
- let mut v = b"-Wl,-force_load,".to_vec();
- v.push_all(morestack.as_vec());
- cmd.arg(&v[..]);
+ let mut v = OsString::from_str("-Wl,-force_load,");
+ v.push_os_str(morestack.as_os_str());
+ cmd.arg(&v);
} else {
cmd.args(&["-Wl,--whole-archive", "-lmorestack", "-Wl,--no-whole-archive"]);
}
// executable. This metadata is in a separate object file from the main
// object file, so we link that in here.
if dylib {
- cmd.arg(obj_filename.with_extension("metadata.o"));
+ cmd.arg(&obj_filename.with_extension("metadata.o"));
}
if t.options.is_like_osx {
cmd.args(&["-dynamiclib", "-Wl,-dylib"]);
if sess.opts.cg.rpath {
- let mut v = "-Wl,-install_name,@rpath/".as_bytes().to_vec();
- v.push_all(out_filename.filename().unwrap());
- cmd.arg(&v[..]);
+ let mut v = OsString::from_str("-Wl,-install_name,@rpath/");
+ v.push_os_str(out_filename.file_name().unwrap());
+ cmd.arg(&v);
}
} else {
cmd.arg("-shared");
if sess.opts.cg.rpath {
let sysroot = sess.sysroot();
let target_triple = &sess.opts.target_triple;
- let get_install_prefix_lib_path = || {
+ let mut get_install_prefix_lib_path = || {
let install_prefix = option_env!("CFG_PREFIX").expect("CFG_PREFIX");
let tlib = filesearch::relative_target_lib_path(sysroot, target_triple);
- let mut path = Path::new(install_prefix);
+ let mut path = PathBuf::new(install_prefix);
path.push(&tlib);
path
};
- let rpath_config = RPathConfig {
+ let mut rpath_config = RPathConfig {
used_crates: sess.cstore.get_used_crates(cstore::RequireDynamic),
- out_filename: out_filename.clone(),
+ out_filename: out_filename.to_path_buf(),
has_rpath: sess.target.target.options.has_rpath,
is_like_osx: sess.target.target.options.is_like_osx,
- get_install_prefix_lib_path: get_install_prefix_lib_path,
- realpath: ::util::fs::realpath
+ get_install_prefix_lib_path: &mut get_install_prefix_lib_path,
+ realpath: &mut ::util::fs::realpath
};
- cmd.args(&rpath::get_rpath_flags(rpath_config));
+ cmd.args(&rpath::get_rpath_flags(&mut rpath_config));
}
// Finally add all the linker arguments provided on the command line along
let search_path = archive_search_paths(sess);
for l in staticlibs {
if takes_hints {
- cmd.arg(format!("-l{}", l));
+ cmd.arg(&format!("-l{}", l));
} else {
// -force_load is the OSX equivalent of --whole-archive, but it
// involves passing the full path to the library to link.
&sess.target.target.options.staticlib_suffix,
&search_path[..],
&sess.diagnostic().handler);
- let mut v = b"-Wl,-force_load,".to_vec();
- v.push_all(lib.as_vec());
- cmd.arg(&v[..]);
+ let mut v = OsString::from_str("-Wl,-force_load,");
+ v.push_os_str(lib.as_os_str());
+ cmd.arg(&v);
}
}
if takes_hints {
for &(ref l, kind) in others {
match kind {
cstore::NativeUnknown => {
- cmd.arg(format!("-l{}", l));
+ cmd.arg(&format!("-l{}", l));
}
cstore::NativeFramework => {
cmd.arg("-framework").arg(&l[..]);
let src = sess.cstore.get_used_crate_source(cnum).unwrap();
match kind {
cstore::RequireDynamic => {
- add_dynamic_crate(cmd, sess, src.dylib.unwrap().0)
+ add_dynamic_crate(cmd, sess, &src.dylib.unwrap().0)
}
cstore::RequireStatic => {
- add_static_crate(cmd, sess, tmpdir, src.rlib.unwrap().0)
+ add_static_crate(cmd, sess, tmpdir, &src.rlib.unwrap().0)
}
}
}
// Converts a library file-stem into a cc -l argument
- fn unlib<'a>(config: &config::Config, stem: &'a [u8]) -> &'a [u8] {
- if stem.starts_with("lib".as_bytes()) && !config.target.options.is_like_windows {
+ fn unlib<'a>(config: &config::Config, stem: &'a str) -> &'a str {
+ if stem.starts_with("lib") && !config.target.options.is_like_windows {
&stem[3..]
} else {
stem
// Adds the static "rlib" versions of all crates to the command line.
fn add_static_crate(cmd: &mut Command, sess: &Session, tmpdir: &Path,
- cratepath: Path) {
+ cratepath: &Path) {
// When performing LTO on an executable output, all of the
// bytecode from the upstream libraries has already been
// included in our object file output. We need to modify all of
// If we're not doing LTO, then our job is simply to just link
// against the archive.
if sess.lto() {
- let name = cratepath.filename_str().unwrap();
+ let name = cratepath.file_name().unwrap().to_str().unwrap();
let name = &name[3..name.len() - 5]; // chop off lib/.rlib
time(sess.time_passes(),
&format!("altering {}.rlib", name),
(), |()| {
- let dst = tmpdir.join(cratepath.filename().unwrap());
+ let dst = tmpdir.join(cratepath.file_name().unwrap());
match fs::copy(&cratepath, &dst) {
Ok(..) => {}
Err(e) => {
// Fix up permissions of the copy, as fs::copy() preserves
// permissions, but the original file may have been installed
// by a package manager and may be read-only.
- match fs::chmod(&dst, old_io::USER_READ | old_io::USER_WRITE) {
+ match fs::metadata(&dst).and_then(|m| {
+ let mut perms = m.permissions();
+ perms.set_readonly(false);
+ fs::set_permissions(&dst, perms)
+ }) {
Ok(..) => {}
Err(e) => {
sess.err(&format!("failed to chmod {} when preparing \
archive.remove_file(&format!("{}.o", name));
let files = archive.files();
if files.iter().any(|s| s.ends_with(".o")) {
- cmd.arg(dst);
+ cmd.arg(&dst);
}
});
} else {
}
// Same thing as above, but for dynamic crates instead of static crates.
- fn add_dynamic_crate(cmd: &mut Command, sess: &Session, cratepath: Path) {
+ fn add_dynamic_crate(cmd: &mut Command, sess: &Session, cratepath: &Path) {
// If we're performing LTO, then it should have been previously required
// that all upstream rust dependencies were available in an rlib format.
assert!(!sess.lto());
// Just need to tell the linker about where the library lives and
// what its name is
- let dir = cratepath.dirname();
- if !dir.is_empty() { cmd.arg("-L").arg(dir); }
-
- let mut v = "-l".as_bytes().to_vec();
- v.push_all(unlib(&sess.target, cratepath.filestem().unwrap()));
- cmd.arg(&v[..]);
+ if let Some(dir) = cratepath.parent() {
+ cmd.arg("-L").arg(dir);
+ }
+ let filestem = cratepath.file_stem().unwrap().to_str().unwrap();
+ cmd.arg(&format!("-l{}", unlib(&sess.target, filestem)));
}
}
for &(kind, ref lib) in &libs {
match kind {
cstore::NativeUnknown => {
- cmd.arg(format!("-l{}", *lib));
+ cmd.arg(&format!("-l{}", *lib));
}
cstore::NativeFramework => {
cmd.arg("-framework");
};
let archive = ArchiveRO::open(&path).expect("wanted an rlib");
- let file = path.filename_str().unwrap();
+ let file = path.file_name().unwrap().to_str().unwrap();
let file = &file[3..file.len() - 5]; // chop off lib/.rlib
debug!("reading {}", file);
for i in iter::count(0, 1) {
use llvm::SMDiagnosticRef;
use trans::{CrateTranslation, ModuleTranslation};
use util::common::time;
+use util::common::path2cstr;
use syntax::codemap;
use syntax::diagnostic;
use syntax::diagnostic::{Emitter, Handler, Level, mk_handler};
use std::ffi::{CStr, CString};
-use std::old_io::Command;
-use std::old_io::fs;
+use std::fs;
use std::iter::Unfold;
+use std::mem;
+use std::path::Path;
+use std::process::{Command, Stdio};
use std::ptr;
use std::str;
-use std::mem;
use std::sync::{Arc, Mutex};
use std::sync::mpsc::channel;
use std::thread;
output: &Path,
file_type: llvm::FileType) {
unsafe {
- let output_c = CString::new(output.as_vec()).unwrap();
+ let output_c = path2cstr(output);
let result = llvm::LLVMRustWriteOutputFile(
target, pm, m, output_c.as_ptr(), file_type);
if !result {
if config.emit_no_opt_bc {
let ext = format!("{}.no-opt.bc", name_extra);
let out = output_names.with_extension(&ext);
- let out = CString::new(out.as_vec()).unwrap();
+ let out = path2cstr(&out);
llvm::LLVMWriteBitcodeToFile(llmod, out.as_ptr());
}
if config.emit_lto_bc {
let name = format!("{}.lto.bc", name_extra);
let out = output_names.with_extension(&name);
- let out = CString::new(out.as_vec()).unwrap();
+ let out = path2cstr(&out);
llvm::LLVMWriteBitcodeToFile(llmod, out.as_ptr());
}
},
if config.emit_bc {
let ext = format!("{}.bc", name_extra);
let out = output_names.with_extension(&ext);
- let out = CString::new(out.as_vec()).unwrap();
+ let out = path2cstr(&out);
llvm::LLVMWriteBitcodeToFile(llmod, out.as_ptr());
}
if config.emit_ir {
let ext = format!("{}.ll", name_extra);
let out = output_names.with_extension(&ext);
- let out = CString::new(out.as_vec()).unwrap();
+ let out = path2cstr(&out);
with_codegen(tm, llmod, config.no_builtins, |cpm| {
llvm::LLVMRustPrintModule(cpm, llmod, out.as_ptr());
})
cmd.arg("-nostdlib");
for index in 0..trans.modules.len() {
- cmd.arg(crate_output.with_extension(&format!("{}.o", index)));
+ cmd.arg(&crate_output.with_extension(&format!("{}.o", index)));
}
- cmd.arg("-r")
- .arg("-o")
- .arg(windows_output_path.as_ref().unwrap_or(output_path));
+ cmd.arg("-r").arg("-o")
+ .arg(windows_output_path.as_ref().map(|s| &**s).unwrap_or(output_path));
cmd.args(&sess.target.target.options.post_link_args);
println!("{:?}", &cmd);
}
- cmd.stdin(::std::old_io::process::Ignored)
- .stdout(::std::old_io::process::InheritFd(1))
- .stderr(::std::old_io::process::InheritFd(2));
+ cmd.stdin(Stdio::null());
match cmd.status() {
Ok(status) => {
if !status.success() {
let pname = get_cc_prog(sess);
let mut cmd = Command::new(&pname[..]);
- cmd.arg("-c").arg("-o").arg(outputs.path(config::OutputTypeObject))
- .arg(outputs.temp_path(config::OutputTypeAssembly));
- debug!("{:?}", &cmd);
+ cmd.arg("-c").arg("-o").arg(&outputs.path(config::OutputTypeObject))
+ .arg(&outputs.temp_path(config::OutputTypeAssembly));
+ debug!("{:?}", cmd);
match cmd.output() {
Ok(prog) => {
pname,
prog.status));
sess.note(&format!("{:?}", &cmd));
- let mut note = prog.error.clone();
- note.push_all(&prog.output);
+ let mut note = prog.stderr.clone();
+ note.push_all(&prog.stdout);
sess.note(str::from_utf8(¬e[..]).unwrap());
sess.abort_if_errors();
}
#![feature(collections)]
#![feature(core)]
#![feature(int_uint)]
-#![feature(old_io)]
#![feature(libc)]
-#![feature(old_path)]
#![feature(quote)]
#![feature(rustc_diagnostic_macros)]
#![feature(rustc_private)]
#![feature(staged_api)]
#![feature(std_misc)]
#![feature(unicode)]
+#![feature(io)]
+#![feature(fs)]
+#![feature(path)]
+#![feature(os)]
+#![feature(tempdir)]
extern crate arena;
extern crate flate;
use middle::ty::{self, Ty};
use std::cell::Cell;
-use std::old_io::{self, File, fs};
use std::env;
+use std::fs::{self, File};
+use std::path::{Path, PathBuf};
use syntax::ast_util::{self, PostExpansionMethod};
use syntax::ast::{self, NodeId, DefId};
}
}
+#[allow(deprecated)]
pub fn process_crate(sess: &Session,
krate: &ast::Crate,
analysis: &ty::CrateAnalysis,
info!("Dumping crate {}", cratename);
// find a path to dump our data to
- let mut root_path = match env::var("DXR_RUST_TEMP_FOLDER") {
- Ok(val) => Path::new(val),
- Err(..) => match odir {
+ let mut root_path = match env::var_os("DXR_RUST_TEMP_FOLDER") {
+ Some(val) => PathBuf::new(&val),
+ None => match odir {
Some(val) => val.join("dxr"),
- None => Path::new("dxr-temp"),
+ None => PathBuf::new("dxr-temp"),
},
};
- match fs::mkdir_recursive(&root_path, old_io::USER_RWX) {
+ match fs::create_dir_all(&root_path) {
Err(e) => sess.err(&format!("Could not create directory {}: {}",
root_path.display(), e)),
_ => (),
// Create output file.
let mut out_name = cratename.clone();
out_name.push_str(".csv");
- root_path.push(out_name);
+ root_path.push(&out_name);
let output_file = match File::create(&root_path) {
Ok(f) => box f,
Err(e) => {
collected_paths: vec!(),
collecting: false,
fmt: FmtStrs::new(box Recorder {
- out: output_file as Box<Writer+'static>,
+ out: output_file,
dump_spans: false,
},
SpanUtils {
use super::escape;
use super::span_utils::SpanUtils;
-use std::vec::Vec;
+use std::io::Write;
use syntax::ast;
use syntax::ast::{NodeId,DefId};
pub struct Recorder {
// output file
- pub out: Box<Writer+'static>,
+ pub out: Box<Write+'static>,
pub dump_spans: bool,
}
let (is_zero, is_signed) = match rhs_t.sty {
ty::ty_int(t) => {
- let zero = C_integral(Type::int_from_ty(cx.ccx(), t), 0u64, false);
+ let zero = C_integral(Type::int_from_ty(cx.ccx(), t), 0, false);
(ICmp(cx, llvm::IntEQ, rhs, zero, debug_loc), true)
}
ty::ty_uint(t) => {
- let zero = C_integral(Type::uint_from_ty(cx.ccx(), t), 0u64, false);
+ let zero = C_integral(Type::uint_from_ty(cx.ccx(), t), 0, false);
(ICmp(cx, llvm::IntEQ, rhs, zero, debug_loc), false)
}
_ => {
let check_overflow = if let Some(v) = tcx.sess.opts.debugging_opts.force_overflow_checks {
v
} else {
- !attr::contains_name(&krate.config, "ndebug")
+ tcx.sess.opts.debug_assertions
};
// Before we touch LLVM, make sure that multithreading is enabled.
let size = ty_size(ty);
if size <= 16 {
let llty = if size == 0 {
- Type::array(&Type::i64(ccx), 0u64)
+ Type::array(&Type::i64(ccx), 0)
} else if size == 1 {
Type::i8(ccx)
} else if size == 2 {
use session::config::{self, FullDebugInfo, LimitedDebugInfo, NoDebugInfo};
use util::nodemap::{DefIdMap, NodeMap, FnvHashMap, FnvHashSet};
use util::ppaux;
+use util::common::path2cstr;
use libc::{c_uint, c_longlong};
-use std::ffi::CString;
use std::cell::{Cell, RefCell};
+use std::ffi::CString;
+use std::path::Path;
use std::ptr;
use std::rc::{Rc, Weak};
use syntax::util::interner::Interner;
cx.sess().warn("debuginfo: Invalid path to crate's local root source file!");
fallback_path(cx)
} else {
- match abs_path.path_relative_from(work_dir) {
+ match abs_path.relative_from(work_dir) {
Some(ref p) if p.is_relative() => {
- // prepend "./" if necessary
- let dotdot = b"..";
- let prefix: &[u8] = &[dotdot[0], ::std::old_path::SEP_BYTE];
- let mut path_bytes = p.as_vec().to_vec();
-
- if &path_bytes[..2] != prefix &&
- &path_bytes[..2] != dotdot {
- path_bytes.insert(0, prefix[0]);
- path_bytes.insert(1, prefix[1]);
+ if p.starts_with(Path::new("./")) {
+ path2cstr(p)
+ } else {
+ path2cstr(&Path::new(".").join(p))
}
-
- CString::new(path_bytes).unwrap()
}
_ => fallback_path(cx)
}
(option_env!("CFG_VERSION")).expect("CFG_VERSION"));
let compile_unit_name = compile_unit_name.as_ptr();
- let work_dir = CString::new(work_dir.as_vec()).unwrap();
+ let work_dir = path2cstr(&work_dir);
let producer = CString::new(producer).unwrap();
let flags = "\0";
let split_name = "\0";
debug!("file_metadata: {}", full_path);
// FIXME (#9639): This needs to handle non-utf8 paths
- let work_dir = cx.sess().working_dir.as_str().unwrap();
+ let work_dir = cx.sess().working_dir.to_str().unwrap();
let file_name =
if full_path.starts_with(work_dir) {
&full_path[work_dir.len() + 1..full_path.len()]
&format!("use of SIMD type `{}` in FFI is highly experimental and \
may result in invalid code",
pprust::ty_to_string(ast_ty)));
- tcx.sess.span_help(ast_ty.span,
+ tcx.sess.fileline_help(ast_ty.span,
"add #![feature(simd_ffi)] to the crate attributes to enable");
}
};
.position(|item| item.def_id() == method_id)
.unwrap();
let (llfn, ty) =
- trans_object_shim(ccx, data.object_ty, trait_id, method_offset_in_trait);
+ trans_object_shim(ccx,
+ data.object_ty,
+ data.upcast_trait_ref.clone(),
+ method_offset_in_trait);
immediate_rvalue(llfn, ty)
}
_ => {
Callee { bcx: bcx, data: Fn(llfn) }
}
traits::VtableObject(ref data) => {
- let (llfn, _) = trans_object_shim(bcx.ccx(), data.object_ty, trait_id, n_method);
+ let (llfn, _) = trans_object_shim(bcx.ccx(),
+ data.object_ty,
+ data.upcast_trait_ref.clone(),
+ n_method);
Callee { bcx: bcx, data: Fn(llfn) }
}
traits::VtableBuiltin(..) |
pub fn trans_object_shim<'a, 'tcx>(
ccx: &'a CrateContext<'a, 'tcx>,
object_ty: Ty<'tcx>,
- trait_id: ast::DefId,
+ upcast_trait_ref: ty::PolyTraitRef<'tcx>,
method_offset_in_trait: uint)
-> (ValueRef, Ty<'tcx>)
{
let _icx = push_ctxt("trans_object_shim");
let tcx = ccx.tcx();
+ let trait_id = upcast_trait_ref.def_id();
- debug!("trans_object_shim(object_ty={}, trait_id={}, method_offset_in_trait={})",
+ debug!("trans_object_shim(object_ty={}, upcast_trait_ref={}, method_offset_in_trait={})",
object_ty.repr(tcx),
- trait_id.repr(tcx),
+ upcast_trait_ref.repr(tcx),
method_offset_in_trait);
let object_trait_ref =
};
// Upcast to the trait in question and extract out the substitutions.
- let upcast_trait_ref = traits::upcast(ccx.tcx(), object_trait_ref.clone(), trait_id).unwrap();
let upcast_trait_ref = ty::erase_late_bound_regions(tcx, &upcast_trait_ref);
let object_substs = upcast_trait_ref.substs.clone().erase_regions();
debug!("trans_object_shim: object_substs={}", object_substs.repr(tcx));
use rscope::{self, UnelidableRscope, RegionScope, ElidableRscope,
ObjectLifetimeDefaultRscope, ShiftedRscope, BindingRscope};
use util::common::{ErrorReported, FN_OUTPUT_NAME};
-use util::nodemap::DefIdMap;
use util::ppaux::{self, Repr, UserString};
use std::iter::{repeat, AdditiveIterator};
pub trait AstConv<'tcx> {
fn tcx<'a>(&'a self) -> &'a ty::ctxt<'tcx>;
+ /// Identify the type scheme for an item with a type, like a type
+ /// alias, fn, or struct. This allows you to figure out the set of
+ /// type parameters defined on the item.
fn get_item_type_scheme(&self, span: Span, id: ast::DefId)
-> Result<ty::TypeScheme<'tcx>, ErrorReported>;
+ /// Returns the `TraitDef` for a given trait. This allows you to
+ /// figure out the set of type parameters defined on the trait.
fn get_trait_def(&self, span: Span, id: ast::DefId)
-> Result<Rc<ty::TraitDef<'tcx>>, ErrorReported>;
+ /// Ensure that the super-predicates for the trait with the given
+ /// id are available and also for the transitive set of
+ /// super-predicates.
+ fn ensure_super_predicates(&self, span: Span, id: ast::DefId)
+ -> Result<(), ErrorReported>;
+
+ /// Returns the set of bounds in scope for the type parameter with
+ /// the given id.
fn get_type_parameter_bounds(&self, span: Span, def_id: ast::NodeId)
-> Result<Vec<ty::PolyTraitRef<'tcx>>, ErrorReported>;
+ /// Returns true if the trait with id `trait_def_id` defines an
+ /// associated type with the name `name`.
+ fn trait_defines_associated_type_named(&self, trait_def_id: ast::DefId, name: ast::Name)
+ -> bool;
+
/// Return an (optional) substitution to convert bound type parameters that
/// are in scope into free ones. This function should only return Some
/// within a fn body.
}
}
if len == 1 {
- span_help!(this.tcx().sess, default_span,
+ fileline_help!(this.tcx().sess, default_span,
"this function's return type contains a borrowed value, but \
the signature does not say which {} it is borrowed from",
m);
} else if len == 0 {
- span_help!(this.tcx().sess, default_span,
+ fileline_help!(this.tcx().sess, default_span,
"this function's return type contains a borrowed value, but \
there is no value for it to be borrowed from");
- span_help!(this.tcx().sess, default_span,
+ fileline_help!(this.tcx().sess, default_span,
"consider giving it a 'static lifetime");
} else {
- span_help!(this.tcx().sess, default_span,
+ fileline_help!(this.tcx().sess, default_span,
"this function's return type contains a borrowed value, but \
the signature does not say whether it is borrowed from {}",
m);
span_err!(this.tcx().sess, span, E0215,
"angle-bracket notation is not stable when \
used with the `Fn` family of traits, use parentheses");
- span_help!(this.tcx().sess, span,
+ fileline_help!(this.tcx().sess, span,
"add `#![feature(unboxed_closures)]` to \
the crate attributes to enable");
}
span_err!(this.tcx().sess, span, E0216,
"parenthetical notation is only stable when \
used with the `Fn` family of traits");
- span_help!(this.tcx().sess, span,
+ fileline_help!(this.tcx().sess, span,
"add `#![feature(unboxed_closures)]` to \
the crate attributes to enable");
}
// We want to produce `<B as SuperTrait<int>>::T == foo`.
// Simple case: X is defined in the current trait.
- if trait_defines_associated_type_named(this, trait_ref.def_id, binding.item_name) {
+ if this.trait_defines_associated_type_named(trait_ref.def_id, binding.item_name) {
return Ok(ty::ProjectionPredicate {
projection_ty: ty::ProjectionTy {
trait_ref: trait_ref,
tcx.mk_substs(dummy_substs)));
}
+ try!(this.ensure_super_predicates(binding.span, trait_ref.def_id));
+
let mut candidates: Vec<ty::PolyTraitRef> =
traits::supertraits(tcx, trait_ref.to_poly_trait_ref())
- .filter(|r| trait_defines_associated_type_named(this, r.def_id(), binding.item_name))
+ .filter(|r| this.trait_defines_associated_type_named(r.def_id(), binding.item_name))
.collect();
// If converting for an object type, then remove the dummy-ty from `Self` now.
pprust::ty_to_string(ty));
match ty.node {
ast::TyRptr(None, ref mut_ty) => {
- span_help!(this.tcx().sess, ty.span,
+ fileline_help!(this.tcx().sess, ty.span,
"perhaps you meant `&{}({} +{})`? (per RFC 438)",
ppaux::mutability_to_string(mut_ty.mutbl),
pprust::ty_to_string(&*mut_ty.ty),
pprust::bounds_to_string(bounds));
}
ast::TyRptr(Some(ref lt), ref mut_ty) => {
- span_help!(this.tcx().sess, ty.span,
+ fileline_help!(this.tcx().sess, ty.span,
"perhaps you meant `&{} {}({} +{})`? (per RFC 438)",
pprust::lifetime_to_string(lt),
ppaux::mutability_to_string(mut_ty.mutbl),
}
_ => {
- span_help!(this.tcx().sess, ty.span,
+ fileline_help!(this.tcx().sess, ty.span,
"perhaps you forgot parentheses? (per RFC 438)");
}
}
let ty_param_name = tcx.ty_param_defs.borrow()[ty_param_node_id].name;
- // FIXME(#20300) -- search where clauses, not bounds
- let bounds =
- this.get_type_parameter_bounds(span, ty_param_node_id)
- .unwrap_or(Vec::new());
+ let bounds = match this.get_type_parameter_bounds(span, ty_param_node_id) {
+ Ok(v) => v,
+ Err(ErrorReported) => { return (tcx.types.err, ty_path_def); }
+ };
+
+ // ensure the super predicates and stop if we encountered an error
+ if bounds.iter().any(|b| this.ensure_super_predicates(span, b.def_id()).is_err()) {
+ return (this.tcx().types.err, ty_path_def);
+ }
let mut suitable_bounds: Vec<_> =
traits::transitive_bounds(tcx, &bounds)
- .filter(|b| trait_defines_associated_type_named(this, b.def_id(), assoc_name))
+ .filter(|b| this.trait_defines_associated_type_named(b.def_id(), assoc_name))
.collect();
if suitable_bounds.len() == 0 {
(ty, def::DefAssociatedTy(trait_did, item_did))
}
-fn trait_defines_associated_type_named(this: &AstConv,
- trait_def_id: ast::DefId,
- assoc_name: ast::Name)
- -> bool
-{
- let tcx = this.tcx();
- let trait_def = ty::lookup_trait_def(tcx, trait_def_id);
- trait_def.associated_type_names.contains(&assoc_name)
-}
-
fn qpath_to_ty<'tcx>(this: &AstConv<'tcx>,
rscope: &RegionScope,
span: Span,
let tcx = this.tcx();
- let mut ast_ty_to_ty_cache = tcx.ast_ty_to_ty_cache.borrow_mut();
- match ast_ty_to_ty_cache.get(&ast_ty.id) {
- Some(&ty::atttce_resolved(ty)) => return ty,
- Some(&ty::atttce_unresolved) => {
- span_err!(tcx.sess, ast_ty.span, E0246,
- "illegal recursive type; insert an enum \
- or struct in the cycle, if this is \
- desired");
- return this.tcx().types.err;
- }
- None => { /* go on */ }
+ if let Some(&ty) = tcx.ast_ty_to_ty_cache.borrow().get(&ast_ty.id) {
+ return ty;
}
- ast_ty_to_ty_cache.insert(ast_ty.id, ty::atttce_unresolved);
- drop(ast_ty_to_ty_cache);
let typ = match ast_ty.node {
ast::TyVec(ref ty) => {
}
};
- tcx.ast_ty_to_ty_cache.borrow_mut().insert(ast_ty.id, ty::atttce_resolved(typ));
+ tcx.ast_ty_to_ty_cache.borrow_mut().insert(ast_ty.id, typ);
return typ;
}
return ast_region_to_region(tcx, r);
}
+    if let Err(ErrorReported) = this.ensure_super_predicates(span, principal_trait_ref.def_id()) {
+ return ty::ReStatic;
+ }
+
// No explicit region bound specified. Therefore, examine trait
// bounds and see if we can derive region bounds from those.
let derived_region_bounds =
let mut builtin_bounds = ty::empty_builtin_bounds();
let mut region_bounds = Vec::new();
let mut trait_bounds = Vec::new();
- let mut trait_def_ids = DefIdMap();
for ast_bound in ast_bounds {
match *ast_bound {
ast::TraitTyParamBound(ref b, ast::TraitBoundModifier::None) => {
match ::lookup_full_def(tcx, b.trait_ref.path.span, b.trait_ref.ref_id) {
def::DefTrait(trait_did) => {
- match trait_def_ids.get(&trait_did) {
- // Already seen this trait. We forbid
- // duplicates in the list (for some
- // reason).
- Some(span) => {
- span_err!(
- tcx.sess, b.trait_ref.path.span, E0127,
- "trait `{}` already appears in the \
- list of bounds",
- b.trait_ref.path.user_string(tcx));
- tcx.sess.span_note(
- *span,
- "previous appearance is here");
-
- continue;
- }
-
- None => { }
- }
-
- trait_def_ids.insert(trait_did, b.trait_ref.path.span);
-
if ty::try_add_builtin_trait(tcx,
trait_did,
&mut builtin_bounds) {
span_err!(tcx.sess, span, E0174,
"explicit use of unboxed closure method `{}` is experimental",
method);
- span_help!(tcx.sess, span,
+ fileline_help!(tcx.sess, span,
"add `#![feature(unboxed_closures)]` to the crate attributes to enable");
}
}
use syntax::codemap::Span;
use util::common::ErrorReported;
+use util::nodemap::FnvHashSet;
use util::ppaux::Repr;
// Helper functions related to manipulating region types.
stack: Vec<(ty::Region, Option<Ty<'tcx>>)>,
span: Span,
out: Vec<Implication<'tcx>>,
+ visited: FnvHashSet<Ty<'tcx>>,
}
/// This routine computes the well-formedness constraints that must hold for the type `ty` to
body_id: body_id,
span: span,
stack: stack,
- out: Vec::new() };
+ out: Vec::new(),
+ visited: FnvHashSet() };
wf.accumulate_from_ty(ty);
debug!("implications: out={}", wf.out.repr(closure_typer.tcx()));
wf.out
debug!("accumulate_from_ty(ty={})",
ty.repr(self.tcx()));
+ // When expanding out associated types, we can visit a cyclic
+ // set of types. Issue #23003.
+ if !self.visited.insert(ty) {
+ return;
+ }
+
match ty.sty {
ty::ty_bool |
ty::ty_char |
target_trait_def_id: ast::DefId)
-> ty::PolyTraitRef<'tcx>
{
- match traits::upcast(self.tcx(), source_trait_ref.clone(), target_trait_def_id) {
- Some(super_trait_ref) => super_trait_ref,
- None => {
- self.tcx().sess.span_bug(
- self.span,
- &format!("cannot upcast `{}` to `{}`",
- source_trait_ref.repr(self.tcx()),
- target_trait_def_id.repr(self.tcx())));
- }
+ let upcast_trait_refs = traits::upcast(self.tcx(),
+ source_trait_ref.clone(),
+ target_trait_def_id);
+
+ // must be exactly one trait ref or we'd get an ambig error etc
+ if upcast_trait_refs.len() != 1 {
+ self.tcx().sess.span_bug(
+ self.span,
+ &format!("cannot uniquely upcast `{}` to `{}`: `{}`",
+ source_trait_ref.repr(self.tcx()),
+ target_trait_def_id.repr(self.tcx()),
+ upcast_trait_refs.repr(self.tcx())));
}
+
+ upcast_trait_refs.into_iter().next().unwrap()
}
fn replace_late_bound_regions_with_fresh_var<T>(&self, value: &ty::Binder<T>) -> T
debug!("elaborate_bounds(bounds={})", bounds.repr(self.tcx()));
let tcx = self.tcx();
- let mut cache = HashSet::new();
for bound_trait_ref in traits::transitive_bounds(tcx, bounds) {
- // Already visited this trait, skip it.
- if !cache.insert(bound_trait_ref.def_id()) {
- continue;
- }
-
let (pos, method) = match trait_method(tcx,
bound_trait_ref.def_id(),
self.method_name) {
fn to_trait_data(&self) -> Option<(ast::DefId,MethodIndex)> {
match self.kind {
- InherentImplCandidate(..) |
- ObjectCandidate(..) => {
+ InherentImplCandidate(..) => {
None
}
+ ObjectCandidate(trait_def_id, method_num, _) => {
+ Some((trait_def_id, method_num))
+ }
ClosureCandidate(trait_def_id, method_num) => {
Some((trait_def_id, method_num))
}
Ok(ty::lookup_trait_def(self.tcx(), id))
}
+ fn ensure_super_predicates(&self, _: Span, _: ast::DefId) -> Result<(), ErrorReported> {
 + // all super-predicates are ensured during the collect pass
+ Ok(())
+ }
+
fn get_free_substs(&self) -> Option<&Substs<'tcx>> {
Some(&self.inh.param_env.free_substs)
}
Ok(r)
}
+ fn trait_defines_associated_type_named(&self,
+ trait_def_id: ast::DefId,
+ assoc_name: ast::Name)
+ -> bool
+ {
+ let trait_def = ty::lookup_trait_def(self.ccx.tcx, trait_def_id);
+ trait_def.associated_type_names.contains(&assoc_name)
+ }
+
fn ty_infer(&self, _span: Span) -> Ty<'tcx> {
self.infcx().next_ty_var()
}
},
expr_t, None);
- tcx.sess.span_help(field.span,
+ tcx.sess.fileline_help(field.span,
"maybe a `()` to call it is missing? \
If not, try an anonymous function");
} else {
span_err!(tcx.sess, sp, E0073,
"this type cannot be instantiated without an \
instance of itself");
- span_help!(tcx.sess, sp, "consider using `Option<{}>`",
+ fileline_help!(tcx.sess, sp, "consider using `Option<{}>`",
ppaux::ty_to_string(tcx, item_ty));
false
} else {
// Find the supertrait bounds. This will add `int:Bar`.
let poly_trait_ref = ty::Binder(trait_ref);
- let predicates = ty::predicates_for_trait_ref(fcx.tcx(), &poly_trait_ref);
+ let predicates = ty::lookup_super_predicates(fcx.tcx(), poly_trait_ref.def_id());
+ let predicates = predicates.instantiate_supertrait(fcx.tcx(), &poly_trait_ref);
let predicates = {
let selcx = &mut traits::SelectionContext::new(fcx.infcx(), fcx);
traits::normalize(selcx, cause.clone(), &predicates)
};
- for predicate in predicates.value {
+ for predicate in predicates.value.predicates {
fcx.register_predicate(traits::Obligation::new(cause.clone(), predicate));
}
for obligation in predicates.obligations {
match suggested_marker_id {
Some(def_id) => {
- self.tcx().sess.span_help(
+ self.tcx().sess.fileline_help(
span,
format!("consider removing `{}` or using a marker such as `{}`",
param_name.user_string(self.tcx()),
return // everything OK
};
span_err!(tcx.sess, sp, E0183, "manual implementations of `{}` are experimental", trait_name);
- span_help!(tcx.sess, sp,
+ fileline_help!(tcx.sess, sp,
"add `#![feature(unboxed_closures)]` to the crate attributes to enable");
}
core type along with a list of the bounds for each parameter. Type
parameters themselves are represented as `ty_param()` instances.
-The phasing of type conversion is somewhat complicated. There are a
-number of possible cycles that can arise.
-
-Converting types can require:
-
-1. `Foo<X>` where `Foo` is a type alias, or trait requires knowing:
- - number of region / type parameters
- - for type parameters, `T:'a` annotations to control defaults for object lifetimes
- - defaults for type parameters (which are themselves types!)
-2. `Foo<X>` where `Foo` is a type alias requires knowing what `Foo` expands to
-3. Translating `SomeTrait` with no explicit lifetime bound requires knowing
- - supertraits of `SomeTrait`
-4. Translating `T::X` (vs `<T as Trait>::X`) requires knowing
- - bounds on `T`
- - supertraits of those bounds
-
-So as you can see, in general translating types requires knowing the
-trait hierarchy. But this gets a bit tricky because translating the
-trait hierarchy requires converting the types that appear in trait
-references. One potential saving grace is that in general knowing the
-trait hierarchy is only necessary for shorthands like `T::X` or
-handling omitted lifetime bounds on object types. Therefore, if we are
-lazy about expanding out the trait hierachy, users can sever cycles if
-necessary. Lazy expansion is also needed for type aliases.
-
-This system is not perfect yet. Currently, we "convert" types and
-traits in three phases (note that conversion only affects the types of
-items / enum variants / methods; it does not e.g. compute the types of
-individual expressions):
+The phasing of type conversion is somewhat complicated. There is no
+clear set of phases we can enforce (e.g., converting traits first,
+then types, or something like that) because the user can introduce
+arbitrary interdependencies. So instead we generally convert things
 +lazily and on demand, and include logic that checks for cycles.
+Demand is driven by calls to `AstConv::get_item_type_scheme` or
+`AstConv::lookup_trait_def`.
+
+Currently, we "convert" types and traits in three phases (note that
+conversion only affects the types of items / enum variants / methods;
+it does not e.g. compute the types of individual expressions):
0. Intrinsics
1. Trait definitions
and invoking an appropriate function (e.g., `trait_def_of_item` or
`convert_item`). However, it is possible that while converting an
item, we may need to compute the *type scheme* or *trait definition*
-for other items. This is a kind of shallow conversion that is
-triggered on demand by calls to `AstConv::get_item_type_scheme` or
-`AstConv::lookup_trait_def`. It is possible for cycles to result from
-this (e.g., `type A = B; type B = A;`), in which case astconv
-(currently) reports the error.
+for other items.
There are some shortcomings in this design:
-- Cycles through trait definitions (e.g. supertraits) are not currently
- detected by astconv. (#12511)
+- Before walking the set of supertraits for a given trait, you must
+ call `ensure_super_predicates` on that trait def-id. Otherwise,
+ `lookup_super_predicates` will result in ICEs.
- Because the type scheme includes defaults, cycles through type
parameter defaults are illegal even if those defaults are never
employed. This is not necessarily a bug.
enum AstConvRequest {
GetItemTypeScheme(ast::DefId),
GetTraitDef(ast::DefId),
+ EnsureSuperPredicates(ast::DefId),
GetTypeParameterBounds(ast::NodeId),
}
request: AstConvRequest,
code: F)
-> Result<R,ErrorReported>
- where F: FnOnce() -> R
+ where F: FnOnce() -> Result<R,ErrorReported>
{
{
let mut stack = self.stack.borrow_mut();
let result = code();
self.stack.borrow_mut().pop();
- Ok(result)
+ result
}
fn report_cycle(&self,
&format!("the cycle begins when processing `{}`...",
ty::item_path_str(tcx, def_id)));
}
+ AstConvRequest::EnsureSuperPredicates(def_id) => {
+ tcx.sess.note(
+ &format!("the cycle begins when computing the supertraits of `{}`...",
+ ty::item_path_str(tcx, def_id)));
+ }
AstConvRequest::GetTypeParameterBounds(id) => {
let def = tcx.type_parameter_def(id);
tcx.sess.note(
&format!("...which then requires processing `{}`...",
ty::item_path_str(tcx, def_id)));
}
+ AstConvRequest::EnsureSuperPredicates(def_id) => {
+ tcx.sess.note(
+ &format!("...which then requires computing the supertraits of `{}`...",
+ ty::item_path_str(tcx, def_id)));
+ }
AstConvRequest::GetTypeParameterBounds(id) => {
let def = tcx.type_parameter_def(id);
tcx.sess.note(
&format!("...which then again requires processing `{}`, completing the cycle.",
ty::item_path_str(tcx, def_id)));
}
+ AstConvRequest::EnsureSuperPredicates(def_id) => {
+ tcx.sess.note(
+ &format!("...which then again requires computing the supertraits of `{}`, \
+ completing the cycle.",
+ ty::item_path_str(tcx, def_id)));
+ }
AstConvRequest::GetTypeParameterBounds(id) => {
let def = tcx.type_parameter_def(id);
tcx.sess.note(
}
}
}
+
 + /// Loads the trait def for a given trait, constructing it from the local AST if necessary.
+ fn get_trait_def(&self, trait_id: ast::DefId)
+ -> Rc<ty::TraitDef<'tcx>>
+ {
+ let tcx = self.tcx;
+
+ if trait_id.krate != ast::LOCAL_CRATE {
+ return ty::lookup_trait_def(tcx, trait_id)
+ }
+
+ let item = match tcx.map.get(trait_id.node) {
+ ast_map::NodeItem(item) => item,
+ _ => tcx.sess.bug(&format!("get_trait_def({}): not an item", trait_id.repr(tcx)))
+ };
+
+ trait_def_of_item(self, &*item)
+ }
+
+ /// Ensure that the (transitive) super predicates for
+ /// `trait_def_id` are available. This will report a cycle error
+ /// if a trait `X` (transitively) extends itself in some form.
+ fn ensure_super_predicates(&self, span: Span, trait_def_id: ast::DefId)
+ -> Result<(), ErrorReported>
+ {
+ self.cycle_check(span, AstConvRequest::EnsureSuperPredicates(trait_def_id), || {
+ let def_ids = ensure_super_predicates_step(self, trait_def_id);
+
+ for def_id in def_ids {
+ try!(self.ensure_super_predicates(span, def_id));
+ }
+
+ Ok(())
+ })
+ }
}
impl<'a,'tcx> ItemCtxt<'a,'tcx> {
-> Result<ty::TypeScheme<'tcx>, ErrorReported>
{
self.ccx.cycle_check(span, AstConvRequest::GetItemTypeScheme(id), || {
- type_scheme_of_def_id(self.ccx, id)
+ Ok(type_scheme_of_def_id(self.ccx, id))
})
}
-> Result<Rc<ty::TraitDef<'tcx>>, ErrorReported>
{
self.ccx.cycle_check(span, AstConvRequest::GetTraitDef(id), || {
- get_trait_def(self.ccx, id)
+ Ok(self.ccx.get_trait_def(id))
})
}
+ fn ensure_super_predicates(&self,
+ span: Span,
+ trait_def_id: ast::DefId)
+ -> Result<(), ErrorReported>
+ {
+ debug!("ensure_super_predicates(trait_def_id={})",
+ trait_def_id.repr(self.tcx()));
+
+ self.ccx.ensure_super_predicates(span, trait_def_id)
+ }
+
+
fn get_type_parameter_bounds(&self,
span: Span,
node_id: ast::NodeId)
-> Result<Vec<ty::PolyTraitRef<'tcx>>, ErrorReported>
{
self.ccx.cycle_check(span, AstConvRequest::GetTypeParameterBounds(node_id), || {
- self.param_bounds.get_type_parameter_bounds(self, span, node_id)
+ let v = self.param_bounds.get_type_parameter_bounds(self, span, node_id)
+ .into_iter()
+ .filter_map(|p| p.to_opt_poly_trait_ref())
+ .collect();
+ Ok(v)
})
}
+ fn trait_defines_associated_type_named(&self,
+ trait_def_id: ast::DefId,
+ assoc_name: ast::Name)
+ -> bool
+ {
+ if trait_def_id.krate == ast::LOCAL_CRATE {
+ trait_defines_associated_type_named(self.ccx, trait_def_id.node, assoc_name)
+ } else {
+ let trait_def = ty::lookup_trait_def(self.tcx(), trait_def_id);
+ trait_def.associated_type_names.contains(&assoc_name)
+ }
+ }
+
fn ty_infer(&self, span: Span) -> Ty<'tcx> {
span_err!(self.tcx().sess, span, E0121,
"the type placeholder `_` is not allowed within types on item signatures");
astconv: &AstConv<'tcx>,
span: Span,
node_id: ast::NodeId)
- -> Vec<ty::PolyTraitRef<'tcx>>;
+ -> Vec<ty::Predicate<'tcx>>;
}
/// Find bounds from both elements of the tuple.
astconv: &AstConv<'tcx>,
span: Span,
node_id: ast::NodeId)
- -> Vec<ty::PolyTraitRef<'tcx>>
+ -> Vec<ty::Predicate<'tcx>>
{
let mut v = self.0.get_type_parameter_bounds(astconv, span, node_id);
v.extend(self.1.get_type_parameter_bounds(astconv, span, node_id).into_iter());
_astconv: &AstConv<'tcx>,
_span: Span,
_node_id: ast::NodeId)
- -> Vec<ty::PolyTraitRef<'tcx>>
+ -> Vec<ty::Predicate<'tcx>>
{
Vec::new()
}
astconv: &AstConv<'tcx>,
_span: Span,
node_id: ast::NodeId)
- -> Vec<ty::PolyTraitRef<'tcx>>
+ -> Vec<ty::Predicate<'tcx>>
{
let def = astconv.tcx().type_parameter_def(node_id);
self.predicates
.iter()
- .filter_map(|predicate| {
- match *predicate {
+ .filter(|predicate| {
+ match **predicate {
ty::Predicate::Trait(ref data) => {
- if data.0.self_ty().is_param(def.space, def.index) {
- Some(data.to_poly_trait_ref())
- } else {
- None
- }
+ data.skip_binder().self_ty().is_param(def.space, def.index)
+ }
+ ty::Predicate::TypeOutlives(ref data) => {
+ data.skip_binder().0.is_param(def.space, def.index)
}
ty::Predicate::Equate(..) |
ty::Predicate::RegionOutlives(..) |
- ty::Predicate::TypeOutlives(..) |
ty::Predicate::Projection(..) => {
- None
+ false
}
}
})
+ .cloned()
.collect()
}
}
astconv: &AstConv<'tcx>,
_: Span,
node_id: ast::NodeId)
- -> Vec<ty::PolyTraitRef<'tcx>>
+ -> Vec<ty::Predicate<'tcx>>
{
// In the AST, bounds can derive from two places. Either
// written inline like `<T:Foo>` or in a where clause like
.iter()
.filter(|p| p.id == node_id)
.flat_map(|p| p.bounds.iter())
- .filter_map(|b| poly_trait_ref_from_bound(astconv, ty, b, &mut Vec::new()));
+ .flat_map(|b| predicates_from_bound(astconv, ty, b).into_iter());
let from_where_clauses =
self.where_clause
})
.filter(|bp| is_param(astconv.tcx(), &bp.bounded_ty, node_id))
.flat_map(|bp| bp.bounds.iter())
- .filter_map(|b| poly_trait_ref_from_bound(astconv, ty, b, &mut Vec::new()));
+ .flat_map(|b| predicates_from_bound(astconv, ty, b).into_iter());
from_ty_params.chain(from_where_clauses).collect()
}
{
if let ast::TyPath(None, _) = ast_ty.node {
let path_res = tcx.def_map.borrow()[ast_ty.id];
- if let def::DefTyParam(_, _, def_id, _) = path_res.base_def {
- path_res.depth == 0 && def_id == local_def(param_id)
- } else {
- false
+ match path_res.base_def {
+ def::DefSelfTy(node_id) =>
+ path_res.depth == 0 && node_id == param_id,
+
+ def::DefTyParam(_, _, def_id, _) =>
+ path_res.depth == 0 && def_id == local_def(param_id),
+
+ _ =>
+ false,
}
} else {
false
rcvr_visibility: ast::Visibility)
where I: Iterator<Item=&'i ast::Method>
{
- debug!("convert_methods(untransformed_rcvr_ty={}, rcvr_ty_generics={})",
+ debug!("convert_methods(untransformed_rcvr_ty={}, rcvr_ty_generics={}, rcvr_ty_predicates={})",
untransformed_rcvr_ty.repr(ccx.tcx),
- rcvr_ty_generics.repr(ccx.tcx));
+ rcvr_ty_generics.repr(ccx.tcx),
+ rcvr_ty_predicates.repr(ccx.tcx));
let tcx = ccx.tcx;
let mut seen_methods = FnvHashSet();
},
ast::ItemTrait(_, _, _, ref trait_items) => {
let trait_def = trait_def_of_item(ccx, it);
+ let _: Result<(), ErrorReported> = // any error is already reported, can ignore
+ ccx.ensure_super_predicates(it.span, local_def(it.id));
convert_trait_predicates(ccx, it);
let trait_predicates = ty::lookup_predicates(ccx.tcx, local_def(it.id));
}
}
-fn get_trait_def<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
- trait_id: ast::DefId)
- -> Rc<ty::TraitDef<'tcx>> {
+/// Ensures that the super-predicates of the trait with def-id
 +/// `trait_def_id` are converted and stored. This does NOT ensure that
+/// the transitive super-predicates are converted; that is the job of
+/// the `ensure_super_predicates()` method in the `AstConv` impl
+/// above. Returns a list of trait def-ids that must be ensured as
+/// well to guarantee that the transitive superpredicates are
+/// converted.
+fn ensure_super_predicates_step(ccx: &CrateCtxt,
+ trait_def_id: ast::DefId)
+ -> Vec<ast::DefId>
+{
let tcx = ccx.tcx;
- if trait_id.krate != ast::LOCAL_CRATE {
- return ty::lookup_trait_def(tcx, trait_id)
- }
+ debug!("ensure_super_predicates_step(trait_def_id={})", trait_def_id.repr(tcx));
- match tcx.map.get(trait_id.node) {
- ast_map::NodeItem(item) => trait_def_of_item(ccx, &*item),
- _ => {
- tcx.sess.bug(&format!("get_trait_def({}): not an item",
- trait_id.node))
- }
+ if trait_def_id.krate != ast::LOCAL_CRATE {
+ // If this trait comes from an external crate, then all of the
+ // supertraits it may depend on also must come from external
+ // crates, and hence all of them already have their
+ // super-predicates "converted" (and available from crate
+ // meta-data), so there is no need to transitively test them.
+ return Vec::new();
}
+
+ let superpredicates = tcx.super_predicates.borrow().get(&trait_def_id).cloned();
+ let superpredicates = superpredicates.unwrap_or_else(|| {
+ let trait_node_id = trait_def_id.node;
+
+ let item = match ccx.tcx.map.get(trait_node_id) {
+ ast_map::NodeItem(item) => item,
+ _ => ccx.tcx.sess.bug(&format!("trait_node_id {} is not an item", trait_node_id))
+ };
+
+ let (generics, bounds) = match item.node {
+ ast::ItemTrait(_, ref generics, ref supertraits, _) => (generics, supertraits),
+ _ => tcx.sess.span_bug(item.span,
+ "ensure_super_predicates_step invoked on non-trait"),
+ };
+
 + // When converting the superbounds for `Trait`, the predicate
 + // `Self : Trait` is in scope, as well as any bounds that appear
 + // on the generic types:
+ let trait_def = trait_def_of_item(ccx, item);
+ let self_predicate = ty::GenericPredicates {
+ predicates: VecPerParamSpace::new(vec![],
+ vec![trait_def.trait_ref.as_predicate()],
+ vec![])
+ };
+ let scope = &(generics, &self_predicate);
+
+ // Convert the bounds that follow the colon, e.g. `Bar+Zed` in `trait Foo : Bar+Zed`.
+ let self_param_ty = ty::mk_self_type(tcx);
+ let superbounds1 = compute_bounds(&ccx.icx(scope), self_param_ty, bounds,
+ SizedByDefault::No, item.span);
+ let superbounds1 = ty::predicates(tcx, self_param_ty, &superbounds1);
+
+ // Convert any explicit superbounds in the where clause,
+ // e.g. `trait Foo where Self : Bar`:
+ let superbounds2 = generics.get_type_parameter_bounds(&ccx.icx(scope), item.span, item.id);
+
+ // Combine the two lists to form the complete set of superbounds:
+ let superbounds = superbounds1.into_iter().chain(superbounds2.into_iter()).collect();
+ let superpredicates = ty::GenericPredicates {
+ predicates: VecPerParamSpace::new(superbounds, vec![], vec![])
+ };
+ debug!("superpredicates for trait {} = {}",
+ local_def(item.id).repr(ccx.tcx),
+ superpredicates.repr(ccx.tcx));
+
+ tcx.super_predicates.borrow_mut().insert(trait_def_id, superpredicates.clone());
+
+ superpredicates
+ });
+
+ let def_ids: Vec<_> = superpredicates.predicates
+ .iter()
+ .filter_map(|p| p.to_opt_poly_trait_ref())
+ .map(|tr| tr.def_id())
+ .collect();
+
+ debug!("ensure_super_predicates_step: def_ids={}", def_ids.repr(tcx));
+
+ def_ids
}
fn trait_def_of_item<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
return def.clone();
}
- let (unsafety, generics, bounds, items) = match it.node {
- ast::ItemTrait(unsafety,
- ref generics,
- ref supertraits,
- ref items) => {
- (unsafety, generics, supertraits, items)
- }
- ref s => {
- tcx.sess.span_bug(
- it.span,
- &format!("trait_def_of_item invoked on {:?}", s));
- }
+ let (unsafety, generics, items) = match it.node {
+ ast::ItemTrait(unsafety, ref generics, _, ref items) => (unsafety, generics, items),
+ _ => tcx.sess.span_bug(it.span, "trait_def_of_item invoked on non-trait"),
};
let paren_sugar = ty::has_attr(tcx, def_id, "rustc_paren_sugar");
it.span,
"the `#[rustc_paren_sugar]` attribute is a temporary means of controlling \
which traits can use parenthetical notation");
- span_help!(ccx.tcx.sess, it.span,
+ fileline_help!(ccx.tcx.sess, it.span,
"add `#![feature(unboxed_closures)]` to \
the crate attributes to use it");
}
let ty_generics = ty_generics_for_trait(ccx, it.id, substs, generics);
- let self_param_ty = ty::ParamTy::for_self().to_ty(ccx.tcx);
-
- // supertraits:
- let bounds = compute_bounds(&ccx.icx(generics),
- self_param_ty,
- bounds,
- SizedByDefault::No,
- it.span);
-
let associated_type_names: Vec<_> =
items.iter()
.filter_map(|item| {
paren_sugar: paren_sugar,
unsafety: unsafety,
generics: ty_generics,
- bounds: bounds,
trait_ref: trait_ref,
associated_type_names: associated_type_names,
});
}
}
+fn trait_defines_associated_type_named(ccx: &CrateCtxt,
+ trait_node_id: ast::NodeId,
+ assoc_name: ast::Name)
+ -> bool
+{
+ let item = match ccx.tcx.map.get(trait_node_id) {
+ ast_map::NodeItem(item) => item,
+ _ => ccx.tcx.sess.bug(&format!("trait_node_id {} is not an item", trait_node_id))
+ };
+
+ let trait_items = match item.node {
+ ast::ItemTrait(_, _, _, ref trait_items) => trait_items,
+ _ => ccx.tcx.sess.bug(&format!("trait_node_id {} is not a trait", trait_node_id))
+ };
+
+ trait_items.iter()
+ .any(|trait_item| {
+ match *trait_item {
+ ast::TypeTraitItem(ref t) => t.ty_param.ident.name == assoc_name,
+ ast::RequiredMethod(..) | ast::ProvidedMethod(..) => false,
+ }
+ })
+}
+
fn convert_trait_predicates<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, it: &ast::Item) {
let tcx = ccx.tcx;
let trait_def = trait_def_of_item(ccx, it);
}
};
- let self_param_ty = ty::ParamTy::for_self().to_ty(ccx.tcx);
-
- let super_predicates = ty::predicates(ccx.tcx, self_param_ty, &trait_def.bounds);
+ let super_predicates = ty::lookup_super_predicates(ccx.tcx, def_id);
// `ty_generic_predicates` below will consider the bounds on the type
// parameters (including `Self`) and the explicit where-clauses,
// but to get the full set of predicates on a trait we need to add
// in the supertrait bounds and anything declared on the
// associated types.
- let mut base_predicates =
- ty::GenericPredicates {
- predicates: VecPerParamSpace::new(super_predicates, vec![], vec![])
- };
+ let mut base_predicates = super_predicates;
// Add in a predicate that `Self:Trait` (where `Trait` is the
// current trait). This is needed for builtin bounds.
}
}
-enum SizedByDefault { Yes, No }
+enum SizedByDefault { Yes, No, }
/// Translate the AST's notion of ty param bounds (which are an enum consisting of a newtyped Ty or
/// a region) to ty's notion of ty param bounds, which can either be user-defined traits, or the
&mut param_bounds.builtin_bounds,
ast_bounds,
span);
-
- check_bounds_compatible(astconv,
- param_ty,
- ¶m_bounds,
- span);
}
param_bounds.trait_bounds.sort_by(|a,b| a.def_id().cmp(&b.def_id()));
param_bounds
}
-fn check_bounds_compatible<'tcx>(astconv: &AstConv<'tcx>,
- param_ty: Ty<'tcx>,
- param_bounds: &ty::ParamBounds<'tcx>,
- span: Span) {
- let tcx = astconv.tcx();
- if !param_bounds.builtin_bounds.contains(&ty::BoundSized) {
- ty::each_bound_trait_and_supertraits(
- tcx,
- ¶m_bounds.trait_bounds,
- |trait_ref| {
- match astconv.get_trait_def(span, trait_ref.def_id()) {
- Ok(trait_def) => {
- if trait_def.bounds.builtin_bounds.contains(&ty::BoundSized) {
- span_err!(tcx.sess, span, E0129,
- "incompatible bounds on `{}`, \
- bound `{}` does not allow unsized type",
- param_ty.user_string(tcx),
- trait_ref.user_string(tcx));
- }
- }
- Err(ErrorReported) => { }
- }
- true
- });
- }
-}
-
-/// Converts a specific TyParamBound from the AST into the
-/// appropriate poly-trait-reference.
-fn poly_trait_ref_from_bound<'tcx>(astconv: &AstConv<'tcx>,
- param_ty: Ty<'tcx>,
- bound: &ast::TyParamBound,
- projections: &mut Vec<ty::PolyProjectionPredicate<'tcx>>)
- -> Option<ty::PolyTraitRef<'tcx>>
+/// Converts a specific TyParamBound from the AST into a set of
+/// predicates that apply to the self-type. A vector is returned
+/// because this can be anywhere from 0 predicates (`T:?Sized` adds no
+/// predicates) to 1 (`T:Foo`) to many (`T:Bar<X=i32>` adds `T:Bar`
+/// and `<T as Bar>::X == i32`).
+fn predicates_from_bound<'tcx>(astconv: &AstConv<'tcx>,
+ param_ty: Ty<'tcx>,
+ bound: &ast::TyParamBound)
+ -> Vec<ty::Predicate<'tcx>>
{
match *bound {
ast::TraitTyParamBound(ref tr, ast::TraitBoundModifier::None) => {
- Some(conv_poly_trait_ref(astconv, param_ty, tr, projections))
+ let mut projections = Vec::new();
+ let pred = conv_poly_trait_ref(astconv, param_ty, tr, &mut projections);
+ projections.into_iter()
+ .map(|p| p.as_predicate())
+ .chain(Some(pred.as_predicate()).into_iter())
+ .collect()
}
- ast::TraitTyParamBound(_, ast::TraitBoundModifier::Maybe) |
- ast::RegionTyParamBound(_) => {
- None
+ ast::RegionTyParamBound(ref lifetime) => {
+ let region = ast_region_to_region(astconv.tcx(), lifetime);
+ let pred = ty::Binder(ty::OutlivesPredicate(param_ty, region));
+ vec![ty::Predicate::TypeOutlives(pred)]
+ }
+ ast::TraitTyParamBound(_, ast::TraitBoundModifier::Maybe) => {
+ Vec::new()
}
}
}
ast::ItemTrait(..) => {
let trait_def = ty::lookup_trait_def(tcx, did);
- let predicates = ty::predicates(tcx, ty::mk_self_type(tcx), &trait_def.bounds);
+ let predicates = ty::lookup_super_predicates(tcx, did);
self.add_constraints_from_predicates(&trait_def.generics,
- &predicates,
+ predicates.predicates.as_slice(),
self.covariant);
let trait_items = ty::trait_items(tcx, did);
_ => unreachable!()
}
});
- let trait_def = ty::lookup_trait_def(tcx, did);
let predicates = ty::lookup_predicates(tcx, did);
- let bounds = trait_def.bounds.clean(cx);
clean::Trait {
unsafety: def.unsafety,
generics: (&def.generics, &predicates, subst::TypeSpace).clean(cx),
items: items.collect(),
- bounds: bounds,
+ bounds: vec![], // supertraits can be found in the list of predicates
}
}
use std::rc::Rc;
use std::u32;
-use std::old_path::Path as FsPath; // Conflicts with Path struct
+use std::path::PathBuf;
use core::DocContext;
use doctree;
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct Crate {
pub name: String,
- pub src: FsPath,
+ pub src: PathBuf,
pub module: Option<Item>,
pub externs: Vec<(ast::CrateNum, ExternalCrate)>,
pub primitives: Vec<PrimitiveType>,
let src = match cx.input {
Input::File(ref path) => path.clone(),
- Input::Str(_) => FsPath::new("") // FIXME: this is wrong
+ Input::Str(_) => PathBuf::new("") // FIXME: this is wrong
};
Crate {
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use std::{old_io, str};
+use std::fs::File;
+use std::io::prelude::*;
+use std::io;
+use std::old_io;
+use std::path::{PathBuf, Path};
+use std::str;
#[derive(Clone)]
pub struct ExternalHtml{
}
}
-pub fn load_string(input: &Path) -> old_io::IoResult<Option<String>> {
- let mut f = try!(old_io::File::open(input));
- let d = try!(f.read_to_end());
+pub fn load_string(input: &Path) -> io::Result<Option<String>> {
+ let mut f = try!(File::open(input));
+ let mut d = Vec::new();
+ try!(f.read_to_end(&mut d));
Ok(str::from_utf8(&d).map(|s| s.to_string()).ok())
}
macro_rules! load_or_return {
($input: expr, $cant_read: expr, $not_utf8: expr) => {
{
- let input = Path::new($input);
+ let input = PathBuf::new($input);
match ::externalfiles::load_string(&input) {
Err(e) => {
let _ = writeln!(&mut old_io::stderr(),
#[cfg(unix)]
mod imp {
- use std::ffi::CString;
+ use std::ffi::{AsOsStr, CString};
+ use std::os::unix::prelude::*;
+ use std::path::Path;
use libc;
use std::os as stdos;
impl Lock {
pub fn new(p: &Path) -> Lock {
- let buf = CString::new(p.as_vec()).unwrap();
+ let buf = CString::new(p.as_os_str().as_bytes()).unwrap();
let fd = unsafe {
libc::open(buf.as_ptr(), libc::O_RDWR | libc::O_CREAT,
libc::S_IRWXU)
#[cfg(windows)]
mod imp {
use libc;
+ use std::ffi::AsOsStr;
use std::mem;
+ use std::os::windows::prelude::*;
use std::os;
+ use std::path::Path;
use std::ptr;
const LOCKFILE_EXCLUSIVE_LOCK: libc::DWORD = 0x00000002;
impl Lock {
pub fn new(p: &Path) -> Lock {
- let mut p_16: Vec<u16> = p.as_str().unwrap().utf16_units().collect();
+ let mut p_16: Vec<_> = p.as_os_str().encode_wide().collect();
p_16.push(0);
let handle = unsafe {
libc::CreateFileW(p_16.as_ptr(),
// except according to those terms.
use std::fmt;
-use std::old_io;
+use std::io::prelude::*;
+use std::io;
use externalfiles::ExternalHtml;
}
pub fn render<T: fmt::Display, S: fmt::Display>(
- dst: &mut old_io::Writer, layout: &Layout, page: &Page, sidebar: &S, t: &T)
- -> old_io::IoResult<()>
+ dst: &mut io::Write, layout: &Layout, page: &Page, sidebar: &S, t: &T)
+ -> io::Result<()>
{
write!(dst,
r##"<!DOCTYPE html>
)
}
-pub fn redirect(dst: &mut old_io::Writer, url: &str) -> old_io::IoResult<()> {
+pub fn redirect(dst: &mut io::Write, url: &str) -> io::Result<()> {
// <script> triggers a redirect before refresh, so this is fine.
write!(dst,
r##"<!DOCTYPE html>
use std::cmp::Ordering;
use std::collections::{HashMap, HashSet};
use std::default::Default;
+use std::ffi::OsStr;
use std::fmt;
-use std::old_io::fs::PathExtensions;
-use std::old_io::{fs, File, BufferedWriter, BufferedReader};
-use std::old_io;
+use std::fs::{self, File};
+use std::io::prelude::*;
+use std::io::{self, BufWriter, BufReader};
use std::iter::repeat;
+use std::path::{PathBuf, Path};
use std::str;
use std::sync::Arc;
use html::layout;
use html::markdown::Markdown;
use html::markdown;
-use html::escape::Escape;
use stability_summary;
/// A pair of name and its optional document.
-#[derive(Clone, Eq, Ord, PartialEq, PartialOrd)]
-pub struct NameDoc(String, Option<String>);
+pub type NameDoc = (String, Option<String>);
/// Major driving force in all rustdoc rendering. This contains information
/// about where in the tree-like hierarchy rendering is occurring and controls
pub root_path: String,
/// The path to the crate root source minus the file name.
/// Used for simplifying paths to the highlighted source code files.
- pub src_root: Path,
+ pub src_root: PathBuf,
/// The current destination folder of where HTML artifacts should be placed.
/// This changes as the context descends into the module hierarchy.
- pub dst: Path,
+ pub dst: PathBuf,
/// This describes the layout of each page, and is not modified after
/// creation of the context (contains info like the favicon and added html).
pub layout: layout::Layout,
- /// This map is a list of what should be displayed on the sidebar of the
- /// current page. The key is the section header (traits, modules,
- /// functions), and the value is the list of containers belonging to this
- /// header. This map will change depending on the surrounding context of the
- /// page.
- pub sidebar: HashMap<String, Vec<NameDoc>>,
/// This flag indicates whether [src] links should be generated or not. If
/// the source files are present in the html rendering, then this will be
/// `true`.
/// Processed source-file paths
seen: HashSet<String>,
/// Root destination to place all HTML output into
- dst: Path,
+ dst: PathBuf,
}
/// Wrapper struct to render the source code of a file. This will do things like
/// Generates the documentation for `crate` into the directory `dst`
pub fn run(mut krate: clean::Crate,
external_html: &ExternalHtml,
- dst: Path,
- passes: HashSet<String>) -> old_io::IoResult<()> {
+ dst: PathBuf,
+ passes: HashSet<String>) -> io::Result<()> {
+ let src_root = match krate.src.parent() {
+ Some(p) => p.to_path_buf(),
+ None => PathBuf::new(""),
+ };
let mut cx = Context {
dst: dst,
- src_root: krate.src.dir_path(),
+ src_root: src_root,
passes: passes,
current: Vec::new(),
root_path: String::new(),
- sidebar: HashMap::new(),
layout: layout::Layout {
logo: "".to_string(),
favicon: "".to_string(),
cx.krate(krate, summary)
}
-fn build_index(krate: &clean::Crate, cache: &mut Cache) -> old_io::IoResult<String> {
+fn build_index(krate: &clean::Crate, cache: &mut Cache) -> io::Result<String> {
// Build the search index from the collected metadata
let mut nodeid_to_pathid = HashMap::new();
let mut pathid_to_nodeid = Vec::new();
}
// Collect the index into a string
- let mut w = Vec::new();
+ let mut w = io::Cursor::new(Vec::new());
try!(write!(&mut w, r#"searchIndex['{}'] = {{"items":["#, krate.name));
let mut lastpath = "".to_string();
try!(write!(&mut w, "]}};"));
- Ok(String::from_utf8(w).unwrap())
+ Ok(String::from_utf8(w.into_inner()).unwrap())
}
fn write_shared(cx: &Context,
krate: &clean::Crate,
cache: &Cache,
- search_index: String) -> old_io::IoResult<()> {
+ search_index: String) -> io::Result<()> {
// Write out the shared files. Note that these are shared among all rustdoc
// docs placed in the output directory, so this needs to be a synchronized
// operation with respect to all other rustdocs running around.
include_bytes!("static/SourceCodePro-Semibold.woff")));
fn collect(path: &Path, krate: &str,
- key: &str) -> old_io::IoResult<Vec<String>> {
+ key: &str) -> io::Result<Vec<String>> {
let mut ret = Vec::new();
if path.exists() {
- for line in BufferedReader::new(File::open(path)).lines() {
+ for line in BufReader::new(try!(File::open(path))).lines() {
let line = try!(line);
if !line.starts_with(key) {
continue
mydst.push(part);
try!(mkdir(&mydst));
}
- mydst.push(format!("{}.{}.js",
- remote_item_type.to_static_str(),
- remote_path[remote_path.len() - 1]));
+ mydst.push(&format!("{}.{}.js",
+ remote_item_type.to_static_str(),
+ remote_path[remote_path.len() - 1]));
let all_implementors = try!(collect(&mydst, &krate.name,
"implementors"));
- try!(mkdir(&mydst.dir_path()));
- let mut f = BufferedWriter::new(try!(File::create(&mydst)));
+ try!(mkdir(mydst.parent().unwrap()));
+ let mut f = BufWriter::new(try!(File::create(&mydst)));
try!(writeln!(&mut f, "(function() {{var implementors = {{}};"));
for implementor in &all_implementors {
}
fn render_sources(cx: &mut Context,
- krate: clean::Crate) -> old_io::IoResult<clean::Crate> {
+ krate: clean::Crate) -> io::Result<clean::Crate> {
info!("emitting source files");
let dst = cx.dst.join("src");
try!(mkdir(&dst));
/// Writes the entire contents of a string to a destination, not attempting to
/// catch any errors.
-fn write(dst: Path, contents: &[u8]) -> old_io::IoResult<()> {
- File::create(&dst).write_all(contents)
+fn write(dst: PathBuf, contents: &[u8]) -> io::Result<()> {
+ try!(File::create(&dst)).write_all(contents)
}
/// Makes a directory on the filesystem, failing the task if an error occurs and
/// skipping if the directory already exists.
-fn mkdir(path: &Path) -> old_io::IoResult<()> {
+fn mkdir(path: &Path) -> io::Result<()> {
if !path.exists() {
- fs::mkdir(path, old_io::USER_RWX)
+ fs::create_dir(path)
} else {
Ok(())
}
/// static HTML tree.
// FIXME (#9639): The closure should deal with &[u8] instead of &str
// FIXME (#9639): This is too conservative, rejecting non-UTF-8 paths
-fn clean_srcpath<F>(src_root: &Path, src: &[u8], mut f: F) where
+fn clean_srcpath<F>(src_root: &Path, p: &Path, mut f: F) where
F: FnMut(&str),
{
- let p = Path::new(src);
-
// make it relative, if possible
- let p = p.path_relative_from(src_root).unwrap_or(p);
+ let p = p.relative_from(src_root).unwrap_or(p);
- if p.as_vec() != b"." {
- for c in p.str_components().map(|x|x.unwrap()) {
- if ".." == c {
- f("up");
- } else {
- f(c)
- }
+ for c in p.iter().map(|x| x.to_str().unwrap()) {
+ if ".." == c {
+ f("up");
+ } else {
+ f(c)
}
}
}
impl<'a> SourceCollector<'a> {
/// Renders the given filename into its corresponding HTML source file.
- fn emit_source(&mut self, filename: &str) -> old_io::IoResult<()> {
- let p = Path::new(filename);
+ fn emit_source(&mut self, filename: &str) -> io::Result<()> {
+ let p = PathBuf::new(filename);
// If we couldn't open this file, then just returns because it
// probably means that it's some standard library macro thing and we
// can't have the source to it anyway.
- let contents = match File::open(&p).read_to_end() {
+ let mut contents = Vec::new();
+ match File::open(&p).and_then(|mut f| f.read_to_end(&mut contents)) {
Ok(r) => r,
// macros from other libraries get special filenames which we can
// safely ignore
// Create the intermediate directories
let mut cur = self.dst.clone();
let mut root_path = String::from_str("../../");
- clean_srcpath(&self.cx.src_root, p.dirname(), |component| {
+ clean_srcpath(&self.cx.src_root, p.parent().unwrap(), |component| {
cur.push(component);
mkdir(&cur).unwrap();
root_path.push_str("../");
});
- let mut fname = p.filename().expect("source has no filename").to_vec();
- fname.extend(".html".bytes());
- cur.push(fname);
- let mut w = BufferedWriter::new(try!(File::create(&cur)));
+ let mut fname = p.file_name().expect("source has no filename")
+ .to_os_string();
+ fname.push_os_str(OsStr::from_str(".html"));
+ cur.push(&fname);
+ let mut w = BufWriter::new(try!(File::create(&cur)));
- let title = format!("{} -- source", cur.filename_display());
+ let title = format!("{} -- source", cur.file_name().unwrap()
+ .to_string_lossy());
let desc = format!("Source to the Rust file `{}`.", filename);
let page = layout::Page {
title: &title,
description: &desc,
keywords: get_basic_keywords(),
};
- try!(layout::render(&mut w as &mut Writer, &self.cx.layout,
+ try!(layout::render(&mut w, &self.cx.layout,
&page, &(""), &Source(contents)));
try!(w.flush());
return Ok(());
/// This currently isn't parallelized, but it'd be pretty easy to add
/// parallelization to this function.
fn krate(mut self, mut krate: clean::Crate,
- stability: stability_summary::ModuleSummary) -> old_io::IoResult<()> {
+ stability: stability_summary::ModuleSummary) -> io::Result<()> {
let mut item = match krate.module.take() {
Some(i) => i,
None => return Ok(())
// render stability dashboard
try!(self.recurse(stability.name.clone(), |this| {
let json_dst = &this.dst.join("stability.json");
- let mut json_out = BufferedWriter::new(try!(File::create(json_dst)));
+ let mut json_out = BufWriter::new(try!(File::create(json_dst)));
try!(write!(&mut json_out, "{}", json::as_json(&stability)));
let mut title = stability.name.clone();
keywords: get_basic_keywords(),
};
let html_dst = &this.dst.join("stability.html");
- let mut html_out = BufferedWriter::new(try!(File::create(html_dst)));
+ let mut html_out = BufWriter::new(try!(File::create(html_dst)));
layout::render(&mut html_out, &this.layout, &page,
&Sidebar{ cx: this, item: &item },
&stability)
/// all sub-items which need to be rendered.
///
/// The rendering driver uses this closure to queue up more work.
- fn item<F>(&mut self, item: clean::Item, mut f: F) -> old_io::IoResult<()> where
+ fn item<F>(&mut self, item: clean::Item, mut f: F) -> io::Result<()> where
F: FnMut(&mut Context, clean::Item),
{
- fn render(w: old_io::File, cx: &Context, it: &clean::Item,
- pushname: bool) -> old_io::IoResult<()> {
- info!("Rendering an item to {}", w.path().display());
+ fn render(w: File, cx: &Context, it: &clean::Item,
+ pushname: bool) -> io::Result<()> {
+ info!("Rendering an item to {}", w.path().unwrap().display());
// A little unfortunate that this is done like this, but it sure
// does make formatting *a lot* nicer.
CURRENT_LOCATION_KEY.with(|slot| {
// We have a huge number of calls to write, so try to alleviate some
// of the pain by using a buffered writer instead of invoking the
// write syscall all the time.
- let mut writer = BufferedWriter::new(w);
+ let mut writer = BufWriter::new(w);
if !cx.render_redirect_pages {
try!(layout::render(&mut writer, &cx.layout, &page,
&Sidebar{ cx: cx, item: it },
clean::ModuleItem(m) => m,
_ => unreachable!()
};
- this.sidebar = this.build_sidebar(&m);
+
+ // render sidebar-items.js used throughout this module
+ {
+ let items = this.build_sidebar_items(&m);
+ let js_dst = this.dst.join("sidebar-items.js");
+ let mut js_out = BufWriter::new(try!(File::create(&js_dst)));
+ try!(write!(&mut js_out, "initSidebarItems({});",
+ json::as_json(&items)));
+ }
+
for item in m.items {
f(this,item);
}
// Things which don't have names (like impls) don't get special
// pages dedicated to them.
_ if item.name.is_some() => {
- let dst = self.dst.join(item_path(&item));
+ let dst = self.dst.join(&item_path(&item));
let dst = try!(File::create(&dst));
render(dst, self, &item, true)
}
}
}
- fn build_sidebar(&self, m: &clean::Module) -> HashMap<String, Vec<NameDoc>> {
+ fn build_sidebar_items(&self, m: &clean::Module) -> HashMap<String, Vec<NameDoc>> {
let mut map = HashMap::new();
for item in &m.items {
if self.ignore_private_item(item) { continue }
- // avoid putting foreign items to the sidebar.
- if let &clean::ForeignFunctionItem(..) = &item.inner { continue }
- if let &clean::ForeignStaticItem(..) = &item.inner { continue }
-
let short = shortty(item).to_static_str();
let myname = match item.name {
None => continue,
let short = short.to_string();
let v = map.entry(short).get().unwrap_or_else(
|vacant_entry| vacant_entry.insert(Vec::with_capacity(1)));
- v.push(NameDoc(myname, Some(shorter_line(item.doc_value()))));
+ v.push((myname, Some(shorter_line(item.doc_value()))));
}
for (_, items) in &mut map {
// has anchors for the line numbers that we're linking to.
if ast_util::is_local(self.item.def_id) {
let mut path = Vec::new();
- clean_srcpath(&cx.src_root, self.item.source.filename.as_bytes(),
+ clean_srcpath(&cx.src_root, Path::new(&self.item.source.filename),
|component| {
path.push(component.to_string());
});
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
let cx = self.cx;
let it = self.item;
+ let parentlen = cx.current.len() - if it.is_mod() {1} else {0};
+
+ // the sidebar is designed to display sibling functions, modules and
+ // other miscellaneous informations. since there are lots of sibling
+ // items (and that causes quadratic growth in large modules),
+ // we refactor common parts into a shared JavaScript file per module.
+ // still, we don't move everything into JS because we want to preserve
+ // as much HTML as possible in order to allow non-JS-enabled browsers
+ // to navigate the documentation (though slightly inefficiently).
+
try!(write!(fmt, "<p class='location'>"));
- let len = cx.current.len() - if it.is_mod() {1} else {0};
- for (i, name) in cx.current.iter().take(len).enumerate() {
+ for (i, name) in cx.current.iter().take(parentlen).enumerate() {
if i > 0 {
try!(write!(fmt, "::<wbr>"));
}
}
try!(write!(fmt, "</p>"));
- fn block(w: &mut fmt::Formatter, short: &str, longty: &str,
- cur: &clean::Item, cx: &Context) -> fmt::Result {
- let items = match cx.sidebar.get(short) {
- Some(items) => items,
- None => return Ok(())
- };
- try!(write!(w, "<div class='block {}'><h2>{}</h2>", short, longty));
- for &NameDoc(ref name, ref doc) in items {
- let curty = shortty(cur).to_static_str();
- let class = if cur.name.as_ref().unwrap() == name &&
- short == curty { "current" } else { "" };
- try!(write!(w, "<a class='{ty} {class}' href='{href}{path}' \
- title='{title}'>{name}</a>",
- ty = short,
- class = class,
- href = if curty == "mod" {"../"} else {""},
- path = if short == "mod" {
- format!("{}/index.html", name)
- } else {
- format!("{}.{}.html", short, name)
- },
- title = Escape(doc.as_ref().unwrap()),
- name = name));
- }
- try!(write!(w, "</div>"));
- Ok(())
+ // sidebar refers to the enclosing module, not this module
+ let relpath = if shortty(it) == ItemType::Module { "../" } else { "" };
+ try!(write!(fmt,
+ "<script>window.sidebarCurrent = {{\
+ name: '{name}', \
+ ty: '{ty}', \
+ relpath: '{path}'\
+ }};</script>",
+ name = it.name.as_ref().map(|x| &x[..]).unwrap_or(""),
+ ty = shortty(it).to_static_str(),
+ path = relpath));
+ if parentlen == 0 {
+ // there is no sidebar-items.js beyond the crate root path
+ // FIXME maybe dynamic crate loading can be merged here
+ } else {
+ try!(write!(fmt, "<script async src=\"{path}sidebar-items.js\"></script>",
+ path = relpath));
}
- try!(block(fmt, "mod", "Modules", it, cx));
- try!(block(fmt, "struct", "Structs", it, cx));
- try!(block(fmt, "enum", "Enums", it, cx));
- try!(block(fmt, "trait", "Traits", it, cx));
- try!(block(fmt, "fn", "Functions", it, cx));
- try!(block(fmt, "macro", "Macros", it, cx));
Ok(())
}
}
"use strict";
var resizeTimeout, interval;
+ // This mapping table should match the discriminants of
+ // `rustdoc::html::item_type::ItemType` type in Rust.
+ var itemTypes = ["mod",
+ "externcrate",
+ "import",
+ "struct",
+ "enum",
+ "fn",
+ "type",
+ "static",
+ "trait",
+ "impl",
+ "tymethod",
+ "method",
+ "structfield",
+ "variant",
+ "macro",
+ "primitive",
+ "associatedtype",
+ "constant"];
+
$('.js-only').removeClass('js-only');
function getQueryStringParams() {
showResults(results);
}
- // This mapping table should match the discriminants of
- // `rustdoc::html::item_type::ItemType` type in Rust.
- var itemTypes = ["mod",
- "externcrate",
- "import",
- "struct",
- "enum",
- "fn",
- "type",
- "static",
- "trait",
- "impl",
- "tymethod",
- "method",
- "structfield",
- "variant",
- "macro",
- "primitive",
- "associatedtype",
- "constant"];
-
function itemTypeFromName(typename) {
for (var i = 0; i < itemTypes.length; ++i) {
if (itemTypes[i] === typename) return i;
window.initSearch = initSearch;
+ // delayed sidebar rendering.
+ function initSidebarItems(items) {
+ var sidebar = $('.sidebar');
+ var current = window.sidebarCurrent;
+
+ function block(shortty, longty) {
+ var filtered = items[shortty];
+ if (!filtered) return;
+
+ var div = $('<div>').attr('class', 'block ' + shortty);
+ div.append($('<h2>').text(longty));
+
+ for (var i = 0; i < filtered.length; ++i) {
+ var item = filtered[i];
+ var name = item[0];
+ var desc = item[1]; // can be null
+
+ var klass = shortty;
+ if (name === current.name && shortty === current.ty) {
+ klass += ' current';
+ }
+ var path;
+ if (shortty === 'mod') {
+ path = name + '/index.html';
+ } else {
+ path = shortty + '.' + name + '.html';
+ }
+ div.append($('<a>', {'href': current.relpath + path,
+ 'title': desc,
+ 'class': klass}).text(name));
+ }
+ sidebar.append(div);
+ }
+
+ block("mod", "Modules");
+ block("struct", "Structs");
+ block("enum", "Enums");
+ block("trait", "Traits");
+ block("fn", "Functions");
+ block("macro", "Macros");
+ }
+
+ window.initSidebarItems = initSidebarItems;
+
window.register_implementors = function(imp) {
var list = $('#implementors-list');
var libs = Object.getOwnPropertyNames(imp);
#![feature(test)]
#![feature(unicode)]
#![feature(str_words)]
+#![feature(io)]
+#![feature(fs)]
+#![feature(path)]
+#![feature(tempdir)]
extern crate arena;
extern crate getopts;
use std::cell::RefCell;
use std::collections::HashMap;
use std::env;
-use std::old_io::File;
-use std::old_io;
+use std::fs::File;
+use std::io::{self, Read, Write};
+use std::path::PathBuf;
use std::rc::Rc;
use std::sync::mpsc::channel;
+
use externalfiles::ExternalHtml;
use serialize::Decodable;
use serialize::json::{self, Json};
let should_test = matches.opt_present("test");
let markdown_input = input.ends_with(".md") || input.ends_with(".markdown");
- let output = matches.opt_str("o").map(|s| Path::new(s));
+ let output = matches.opt_str("o").map(|s| PathBuf::new(&s));
let cfgs = matches.opt_strs("cfg");
let external_html = match ExternalHtml::load(
(true, false) => {
return test::run(input, cfgs, libs, externs, test_args, crate_name)
}
- (false, true) => return markdown::render(input, output.unwrap_or(Path::new("doc")),
+ (false, true) => return markdown::render(input,
+ output.unwrap_or(PathBuf::new("doc")),
&matches, &external_html,
!matches.opt_present("markdown-no-toc")),
(false, false) => {}
info!("going to format");
match matches.opt_str("w").as_ref().map(|s| &**s) {
Some("html") | None => {
- match html::render::run(krate, &external_html, output.unwrap_or(Path::new("doc")),
+ match html::render::run(krate, &external_html,
+ output.unwrap_or(PathBuf::new("doc")),
passes.into_iter().collect()) {
Ok(()) => {}
Err(e) => panic!("failed to generate documentation: {}", e),
}
Some("json") => {
match json_output(krate, json_plugins,
- output.unwrap_or(Path::new("doc.json"))) {
+ output.unwrap_or(PathBuf::new("doc.json"))) {
Ok(()) => {}
Err(e) => panic!("failed to write json: {}", e),
}
let cfgs = matches.opt_strs("cfg");
let triple = matches.opt_str("target");
- let cr = Path::new(cratefile);
+ let cr = PathBuf::new(cratefile);
info!("starting to run rustc");
let (tx, rx) = channel();
std::thread::spawn(move || {
use rustc::session::config::Input;
- let cr = cr;
- tx.send(core::run_core(paths, cfgs, externs, Input::File(cr), triple)).unwrap();
+ tx.send(core::run_core(paths, cfgs, externs, Input::File(cr),
+ triple)).unwrap();
}).join().map_err(|_| "rustc failed").unwrap();
let (mut krate, analysis) = rx.recv().unwrap();
info!("finished with rustc");
/// This input format purely deserializes the json output file. No passes are
/// run over the deserialized output.
fn json_input(input: &str) -> Result<Output, String> {
- let mut input = match File::open(&Path::new(input)) {
- Ok(f) => f,
- Err(e) => {
- return Err(format!("couldn't open {}: {}", input, e))
- }
+ let mut bytes = Vec::new();
+ match File::open(input).and_then(|mut f| f.read_to_end(&mut bytes)) {
+ Ok(()) => {}
+ Err(e) => return Err(format!("couldn't open {}: {}", input, e)),
};
- match json::from_reader(&mut input) {
+ match json::from_reader(&mut &bytes[..]) {
Err(s) => Err(format!("{:?}", s)),
Ok(Json::Object(obj)) => {
let mut obj = obj;
/// Outputs the crate/plugin json as a giant json blob at the specified
/// destination.
fn json_output(krate: clean::Crate, res: Vec<plugins::PluginJson> ,
- dst: Path) -> old_io::IoResult<()> {
+ dst: PathBuf) -> io::Result<()> {
// {
// "schema": version,
// "crate": { parsed crate ... },
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+use std::fs::File;
+use std::io::Write;
use std::old_io;
+use std::path::{PathBuf, Path};
use core;
use getopts;
/// Render `input` (e.g. "foo.md") into an HTML file in `output`
/// (e.g. output = "bar" => "bar/foo.html").
-pub fn render(input: &str, mut output: Path, matches: &getopts::Matches,
+pub fn render(input: &str, mut output: PathBuf, matches: &getopts::Matches,
external_html: &ExternalHtml, include_toc: bool) -> int {
let input_p = Path::new(input);
- output.push(input_p.filestem().unwrap());
+ output.push(input_p.file_stem().unwrap());
output.set_extension("html");
let mut css = String::new();
}
let playground = playground.unwrap_or("".to_string());
- let mut out = match old_io::File::create(&output) {
+ let mut out = match File::create(&output) {
Err(e) => {
let _ = writeln!(&mut old_io::stderr(),
"error opening `{}` for writing: {}",
// except according to those terms.
use std::cell::RefCell;
-use std::sync::mpsc::channel;
+use std::collections::{HashSet, HashMap};
use std::dynamic_lib::DynamicLibrary;
-use std::old_io::{Command, TempDir};
+use std::env;
+use std::ffi::OsString;
+use std::fs::TempDir;
use std::old_io;
-use std::os;
+use std::io;
+use std::path::PathBuf;
+use std::process::Command;
use std::str;
+use std::sync::mpsc::channel;
use std::thread;
use std::thunk::Thunk;
-use std::collections::{HashSet, HashMap};
use testing;
use rustc_lint;
use rustc::session::{self, config};
mut test_args: Vec<String>,
crate_name: Option<String>)
-> int {
- let input_path = Path::new(input);
+ let input_path = PathBuf::new(input);
let input = config::Input::File(input_path.clone());
let sessopts = config::Options {
- maybe_sysroot: Some(os::self_exe_name().unwrap().dir_path().dir_path()),
+ maybe_sysroot: Some(env::current_exe().unwrap().parent().unwrap()
+ .parent().unwrap().to_path_buf()),
search_paths: libs.clone(),
crate_types: vec!(config::CrateTypeDylib),
externs: externs.clone(),
0
}
+#[allow(deprecated)]
fn runtest(test: &str, cratename: &str, libs: SearchPaths,
externs: core::Externs,
should_fail: bool, no_run: bool, as_test_harness: bool) {
let input = config::Input::Str(test.to_string());
let sessopts = config::Options {
- maybe_sysroot: Some(os::self_exe_name().unwrap().dir_path().dir_path()),
+ maybe_sysroot: Some(env::current_exe().unwrap().parent().unwrap()
+ .parent().unwrap().to_path_buf()),
search_paths: libs,
crate_types: vec!(config::CrateTypeExecutable),
output_types: vec!(config::OutputTypeExe),
rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess));
let outdir = TempDir::new("rustdoctest").ok().expect("rustdoc needs a tempdir");
- let out = Some(outdir.path().clone());
+ let out = Some(outdir.path().to_path_buf());
let cfg = config::build_configuration(&sess);
let libdir = sess.target_filesearch(PathKind::All).get_lib_path();
let mut control = driver::CompileController::basic();
// environment to ensure that the target loads the right libraries at
// runtime. It would be a sad day if the *host* libraries were loaded as a
// mistake.
- let mut cmd = Command::new(outdir.path().join("rust-out"));
+ let mut cmd = Command::new(&outdir.path().join("rust-out"));
+ let var = DynamicLibrary::envvar();
let newpath = {
- let mut path = DynamicLibrary::search_path();
+ let path = env::var_os(var).unwrap_or(OsString::new());
+ let mut path = env::split_paths(&path).collect::<Vec<_>>();
path.insert(0, libdir.clone());
- DynamicLibrary::create_path(&path)
+ env::join_paths(path.iter()).unwrap()
};
- cmd.env(DynamicLibrary::envvar(), newpath);
+ cmd.env(var, &newpath);
match cmd.output() {
Err(e) => panic!("couldn't run the test: {}{}", e,
- if e.kind == old_io::PermissionDenied {
+ if e.kind() == io::ErrorKind::PermissionDenied {
" - maybe your tempdir is mounted with noexec?"
} else { "" }),
Ok(out) => {
panic!("test executable succeeded when it should have failed");
} else if !should_fail && !out.status.success() {
panic!("test executable failed:\n{:?}",
- str::from_utf8(&out.error));
+ str::from_utf8(&out.stderr));
}
}
}
// This may be an overestimate if there is any whitespace
let mut b = Vec::with_capacity(self.len() / 2);
let mut modulus = 0;
- let mut buf = 0u8;
+ let mut buf = 0;
for (idx, byte) in self.bytes().enumerate() {
buf <<= 4;
fn decode_hex_escape(&mut self) -> Result<u16, ParserError> {
let mut i = 0;
- let mut n = 0u16;
+ let mut n = 0;
while i < 4 && !self.eof() {
self.bump();
n = match self.ch_or_null() {
#![feature(staged_api)]
#![feature(std_misc)]
#![feature(unicode)]
+#![feature(path)]
#![cfg_attr(test, feature(test))]
// test harness access
*/
use std::old_path;
+use std::path;
use std::rc::Rc;
use std::cell::{Cell, RefCell};
use std::sync::Arc;
}
}
+impl Encodable for path::PathBuf {
+ fn encode<S: Encoder>(&self, e: &mut S) -> Result<(), S::Error> {
+ self.to_str().unwrap().encode(e)
+ }
+}
+
+impl Decodable for path::PathBuf {
+ fn decode<D: Decoder>(d: &mut D) -> Result<path::PathBuf, D::Error> {
+ let bytes: String = try!(Decodable::decode(d));
+ Ok(path::PathBuf::new(&bytes))
+ }
+}
+
impl<T: Encodable + Copy> Encodable for Cell<T> {
fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
self.get().encode(s)
impl AsciiExt for u8 {
type Owned = u8;
#[inline]
- fn is_ascii(&self) -> bool { *self & 128 == 0u8 }
+ fn is_ascii(&self) -> bool { *self & 128 == 0 }
#[inline]
fn to_ascii_uppercase(&self) -> u8 { ASCII_UPPERCASE_MAP[*self as usize] }
#[inline]
assert_eq!("url()URL()uRl()ürl".to_ascii_uppercase(), "URL()URL()URL()üRL");
assert_eq!("hıKß".to_ascii_uppercase(), "HıKß");
- for i in 0u32..501 {
+ for i in 0..501 {
let upper = if 'a' as u32 <= i && i <= 'z' as u32 { i + 'A' as u32 - 'a' as u32 }
else { i };
assert_eq!((from_u32(i).unwrap()).to_string().to_ascii_uppercase(),
// Dotted capital I, Kelvin sign, Sharp S.
assert_eq!("HİKß".to_ascii_lowercase(), "hİKß");
- for i in 0u32..501 {
+ for i in 0..501 {
let lower = if 'A' as u32 <= i && i <= 'Z' as u32 { i + 'a' as u32 - 'A' as u32 }
else { i };
assert_eq!((from_u32(i).unwrap()).to_string().to_ascii_lowercase(),
"URL()URL()URL()üRL".to_string());
assert_eq!(("hıKß".to_string()).into_ascii_uppercase(), "HıKß");
- for i in 0u32..501 {
+ for i in 0..501 {
let upper = if 'a' as u32 <= i && i <= 'z' as u32 { i + 'A' as u32 - 'a' as u32 }
else { i };
assert_eq!((from_u32(i).unwrap()).to_string().into_ascii_uppercase(),
// Dotted capital I, Kelvin sign, Sharp S.
assert_eq!(("HİKß".to_string()).into_ascii_lowercase(), "hİKß");
- for i in 0u32..501 {
+ for i in 0..501 {
let lower = if 'A' as u32 <= i && i <= 'Z' as u32 { i + 'a' as u32 - 'A' as u32 }
else { i };
assert_eq!((from_u32(i).unwrap()).to_string().into_ascii_lowercase(),
assert!(!"K".eq_ignore_ascii_case("k"));
assert!(!"ß".eq_ignore_ascii_case("s"));
- for i in 0u32..501 {
+ for i in 0..501 {
let lower = if 'A' as u32 <= i && i <= 'Z' as u32 { i + 'a' as u32 - 'A' as u32 }
else { i };
assert!((from_u32(i).unwrap()).to_string().eq_ignore_ascii_case(
use rt::heap::{allocate, deallocate, EMPTY};
use collections::hash_state::HashState;
-const EMPTY_BUCKET: u64 = 0u64;
+const EMPTY_BUCKET: u64 = 0;
/// The raw hashtable, providing safe-ish access to the unzipped and highly
/// optimized arrays of hashes, keys, and values.
{
let mut state = hash_state.hasher();
t.hash(&mut state);
- // We need to avoid 0u64 in order to prevent collisions with
+ // We need to avoid 0 in order to prevent collisions with
// EMPTY_HASH. We can maintain our precious uniform distribution
// of initial indexes by unconditionally setting the MSB,
// effectively reducing 64-bits hashes to 63 bits.
//! * You want a bit vector.
//!
//! ### Use a `BitSet` when:
-//! * You want a `VecSet`.
+//! * You want a `BitVec`, but want `Set` properties
//!
//! ### Use a `BinaryHeap` when:
//! * You want to store a bunch of elements, but only ever want to process the "biggest"
//!
//! Choosing the right collection for the job requires an understanding of what each collection
//! is good at. Here we briefly summarize the performance of different collections for certain
-//! important operations. For further details, see each type's documentation.
+//! important operations. For further details, see each type's documentation, and note that the
+//! names of actual methods may differ from the tables below on certain collections.
//!
//! Throughout the documentation, we will follow a few conventions. For all operations,
//! the collection's size is denoted by n. If another collection is involved in the operation, it
//! a variant of the `Entry` enum.
//!
//! If a `Vacant(entry)` is yielded, then the key *was not* found. In this case the
-//! only valid operation is to `set` the value of the entry. When this is done,
+//! only valid operation is to `insert` a value into the entry. When this is done,
//! the vacant entry is consumed and converted into a mutable reference to the
//! the value that was inserted. This allows for further manipulation of the value
//! beyond the lifetime of the search itself. This is useful if complex logic needs to
//! be performed on the value regardless of whether the value was just inserted.
//!
//! If an `Occupied(entry)` is yielded, then the key *was* found. In this case, the user
-//! has several options: they can `get`, `set`, or `take` the value of the occupied
+//! has several options: they can `get`, `insert`, or `remove` the value of the occupied
//! entry. Additionally, they can convert the occupied entry into a mutable reference
-//! to its value, providing symmetry to the vacant `set` case.
+//! to its value, providing symmetry to the vacant `insert` case.
//!
//! ### Examples
//!
//! use std::collections::btree_map::{BTreeMap, Entry};
//!
//! // A client of the bar. They have an id and a blood alcohol level.
-//! struct Person { id: u32, blood_alcohol: f32 };
+//! struct Person { id: u32, blood_alcohol: f32 }
//!
//! // All the orders made to the bar, by client id.
//! let orders = vec![1,2,1,2,3,4,1,2,2,3,4,1,1,1];
mod os {
pub const FAMILY: &'static str = "unix";
pub const OS: &'static str = "ios";
+ pub const DLL_PREFIX: &'static str = "lib";
+ pub const DLL_SUFFIX: &'static str = ".dylib";
+ pub const DLL_EXTENSION: &'static str = "dylib";
pub const EXE_SUFFIX: &'static str = "";
pub const EXE_EXTENSION: &'static str = "";
}
use prelude::v1::*;
use super::*;
use libc;
- use mem;
#[test]
fn c_to_rust() {
let data = b"123\0";
let ptr = data.as_ptr() as *const libc::c_char;
unsafe {
- assert_eq!(c_str_to_bytes(&ptr), b"123");
- assert_eq!(c_str_to_bytes_with_nul(&ptr), b"123\0");
+ assert_eq!(CStr::from_ptr(ptr).to_bytes(), b"123");
+ assert_eq!(CStr::from_ptr(ptr).to_bytes_with_nul(), b"123\0");
}
}
check!(w.write(msg));
}
let files = check!(fs::read_dir(dir));
- let mut mem = [0u8; 4];
+ let mut mem = [0; 4];
for f in files {
let f = f.unwrap().path();
{
check!(File::create(&dir2.join("14")));
let files = check!(fs::walk_dir(dir));
- let mut cur = [0u8; 2];
+ let mut cur = [0; 2];
for f in files {
let f = f.unwrap().path();
let stem = f.file_stem().unwrap().to_str().unwrap();
#[test]
fn read_char_buffered() {
- let buf = [195u8, 159u8];
+ let buf = [195, 159];
let reader = BufReader::with_capacity(1, &buf[..]);
assert_eq!(reader.chars().next(), Some(Ok('ß')));
}
#[test]
fn test_chars() {
- let buf = [195u8, 159u8, b'a'];
+ let buf = [195, 159, b'a'];
let reader = BufReader::with_capacity(1, &buf[..]);
let mut it = reader.chars();
assert_eq!(it.next(), Some(Ok('ß')));
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-#![allow(missing_copy_implementations)]
-
use prelude::v1::*;
use io::prelude::*;
/// Implementations of the I/O traits for `Cursor<T>` are not currently generic
/// over `T` itself. Instead, specific implementations are provided for various
/// in-memory buffer types like `Vec<u8>` and `&[u8]`.
+#[stable(feature = "rust1", since = "1.0.0")]
pub struct Cursor<T> {
inner: T,
pos: u64,
impl<T> Cursor<T> {
/// Create a new cursor wrapping the provided underlying I/O object.
+ #[stable(feature = "rust1", since = "1.0.0")]
pub fn new(inner: T) -> Cursor<T> {
Cursor { pos: 0, inner: inner }
}
/// Consume this cursor, returning the underlying value.
+ #[stable(feature = "rust1", since = "1.0.0")]
pub fn into_inner(self) -> T { self.inner }
/// Get a reference to the underlying value in this cursor.
+ #[stable(feature = "rust1", since = "1.0.0")]
pub fn get_ref(&self) -> &T { &self.inner }
/// Get a mutable reference to the underlying value in this cursor.
///
/// Care should be taken to avoid modifying the internal I/O state of the
/// underlying value as it may corrupt this cursor's position.
+ #[stable(feature = "rust1", since = "1.0.0")]
pub fn get_mut(&mut self) -> &mut T { &mut self.inner }
/// Returns the current value of this cursor
+ #[stable(feature = "rust1", since = "1.0.0")]
pub fn position(&self) -> u64 { self.pos }
/// Sets the value of this cursor
+ #[stable(feature = "rust1", since = "1.0.0")]
pub fn set_position(&mut self, pos: u64) { self.pos = pos; }
}
}
}
+#[stable(feature = "rust1", since = "1.0.0")]
impl<'a> io::Seek for Cursor<&'a [u8]> { seek!(); }
+#[stable(feature = "rust1", since = "1.0.0")]
impl<'a> io::Seek for Cursor<&'a mut [u8]> { seek!(); }
+#[stable(feature = "rust1", since = "1.0.0")]
impl io::Seek for Cursor<Vec<u8>> { seek!(); }
macro_rules! read {
}
}
+#[stable(feature = "rust1", since = "1.0.0")]
impl<'a> Read for Cursor<&'a [u8]> { read!(); }
+#[stable(feature = "rust1", since = "1.0.0")]
impl<'a> Read for Cursor<&'a mut [u8]> { read!(); }
+#[stable(feature = "rust1", since = "1.0.0")]
impl Read for Cursor<Vec<u8>> { read!(); }
macro_rules! buffer {
}
}
+#[stable(feature = "rust1", since = "1.0.0")]
impl<'a> BufRead for Cursor<&'a [u8]> { buffer!(); }
+#[stable(feature = "rust1", since = "1.0.0")]
impl<'a> BufRead for Cursor<&'a mut [u8]> { buffer!(); }
+#[stable(feature = "rust1", since = "1.0.0")]
impl<'a> BufRead for Cursor<Vec<u8>> { buffer!(); }
+#[stable(feature = "rust1", since = "1.0.0")]
impl<'a> Write for Cursor<&'a mut [u8]> {
fn write(&mut self, data: &[u8]) -> io::Result<usize> {
let pos = cmp::min(self.pos, self.inner.len() as u64);
fn flush(&mut self) -> io::Result<()> { Ok(()) }
}
+#[stable(feature = "rust1", since = "1.0.0")]
impl Write for Cursor<Vec<u8>> {
fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
// Make sure the internal buffer is as least as big as where we
#[test]
fn test_mem_reader() {
- let mut reader = Cursor::new(vec!(0u8, 1, 2, 3, 4, 5, 6, 7));
+ let mut reader = Cursor::new(vec!(0, 1, 2, 3, 4, 5, 6, 7));
let mut buf = [];
assert_eq!(reader.read(&mut buf), Ok(0));
assert_eq!(reader.position(), 0);
#[test]
fn read_to_end() {
- let mut reader = Cursor::new(vec!(0u8, 1, 2, 3, 4, 5, 6, 7));
+ let mut reader = Cursor::new(vec!(0, 1, 2, 3, 4, 5, 6, 7));
let mut v = Vec::new();
reader.read_to_end(&mut v).ok().unwrap();
assert_eq!(v, [0, 1, 2, 3, 4, 5, 6, 7]);
#[test]
fn test_slice_reader() {
- let in_buf = vec![0u8, 1, 2, 3, 4, 5, 6, 7];
+ let in_buf = vec![0, 1, 2, 3, 4, 5, 6, 7];
let mut reader = &mut in_buf.as_slice();
let mut buf = [];
assert_eq!(reader.read(&mut buf), Ok(0));
#[test]
fn test_buf_reader() {
- let in_buf = vec![0u8, 1, 2, 3, 4, 5, 6, 7];
+ let in_buf = vec![0, 1, 2, 3, 4, 5, 6, 7];
let mut reader = Cursor::new(in_buf.as_slice());
let mut buf = [];
assert_eq!(reader.read(&mut buf), Ok(0));
assert_eq!(r.seek(SeekFrom::Start(10)), Ok(10));
assert_eq!(r.read(&mut [0]), Ok(0));
- let mut r = Cursor::new(vec!(10u8));
+ let mut r = Cursor::new(vec!(10));
assert_eq!(r.seek(SeekFrom::Start(10)), Ok(10));
assert_eq!(r.read(&mut [0]), Ok(0));
#[test]
fn seek_before_0() {
- let buf = [0xff_u8];
+ let buf = [0xff];
let mut r = Cursor::new(&buf[..]);
assert!(r.seek(SeekFrom::End(-2)).is_err());
- let mut r = Cursor::new(vec!(10u8));
+ let mut r = Cursor::new(vec!(10));
assert!(r.seek(SeekFrom::End(-2)).is_err());
let mut buf = [0];
// =============================================================================
// Forwarding implementations
+#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, R: Read + ?Sized> Read for &'a mut R {
- fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> { (**self).read(buf) }
-
- fn read_to_end(&mut self, buf: &mut Vec<u8>) -> io::Result<()> { (**self).read_to_end(buf) }
-
+ fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
+ (**self).read(buf)
+ }
+ fn read_to_end(&mut self, buf: &mut Vec<u8>) -> io::Result<()> {
+ (**self).read_to_end(buf)
+ }
fn read_to_string(&mut self, buf: &mut String) -> io::Result<()> {
(**self).read_to_string(buf)
}
}
+#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, W: Write + ?Sized> Write for &'a mut W {
fn write(&mut self, buf: &[u8]) -> io::Result<usize> { (**self).write(buf) }
-
- fn write_all(&mut self, buf: &[u8]) -> io::Result<()> { (**self).write_all(buf) }
-
- fn write_fmt(&mut self, fmt: fmt::Arguments) -> io::Result<()> { (**self).write_fmt(fmt) }
-
fn flush(&mut self) -> io::Result<()> { (**self).flush() }
+ fn write_all(&mut self, buf: &[u8]) -> io::Result<()> {
+ (**self).write_all(buf)
+ }
+ fn write_fmt(&mut self, fmt: fmt::Arguments) -> io::Result<()> {
+ (**self).write_fmt(fmt)
+ }
}
+#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, S: Seek + ?Sized> Seek for &'a mut S {
fn seek(&mut self, pos: SeekFrom) -> io::Result<u64> { (**self).seek(pos) }
}
+#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, B: BufRead + ?Sized> BufRead for &'a mut B {
fn fill_buf(&mut self) -> io::Result<&[u8]> { (**self).fill_buf() }
-
fn consume(&mut self, amt: usize) { (**self).consume(amt) }
-
fn read_until(&mut self, byte: u8, buf: &mut Vec<u8>) -> io::Result<()> {
(**self).read_until(byte, buf)
}
-
- fn read_line(&mut self, buf: &mut String) -> io::Result<()> { (**self).read_line(buf) }
+ fn read_line(&mut self, buf: &mut String) -> io::Result<()> {
+ (**self).read_line(buf)
+ }
}
+#[stable(feature = "rust1", since = "1.0.0")]
impl<R: Read + ?Sized> Read for Box<R> {
- fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> { (**self).read(buf) }
+ fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
+ (**self).read(buf)
+ }
+ fn read_to_end(&mut self, buf: &mut Vec<u8>) -> io::Result<()> {
+ (**self).read_to_end(buf)
+ }
+ fn read_to_string(&mut self, buf: &mut String) -> io::Result<()> {
+ (**self).read_to_string(buf)
+ }
}
+#[stable(feature = "rust1", since = "1.0.0")]
impl<W: Write + ?Sized> Write for Box<W> {
fn write(&mut self, buf: &[u8]) -> io::Result<usize> { (**self).write(buf) }
fn flush(&mut self) -> io::Result<()> { (**self).flush() }
+ fn write_all(&mut self, buf: &[u8]) -> io::Result<()> {
+ (**self).write_all(buf)
+ }
+ fn write_fmt(&mut self, fmt: fmt::Arguments) -> io::Result<()> {
+ (**self).write_fmt(fmt)
+ }
}
+#[stable(feature = "rust1", since = "1.0.0")]
impl<S: Seek + ?Sized> Seek for Box<S> {
fn seek(&mut self, pos: SeekFrom) -> io::Result<u64> { (**self).seek(pos) }
}
+#[stable(feature = "rust1", since = "1.0.0")]
impl<B: BufRead + ?Sized> BufRead for Box<B> {
fn fill_buf(&mut self) -> io::Result<&[u8]> { (**self).fill_buf() }
fn consume(&mut self, amt: usize) { (**self).consume(amt) }
+ fn read_until(&mut self, byte: u8, buf: &mut Vec<u8>) -> io::Result<()> {
+ (**self).read_until(byte, buf)
+ }
+ fn read_line(&mut self, buf: &mut String) -> io::Result<()> {
+ (**self).read_line(buf)
+ }
}
// =============================================================================
// In-memory buffer implementations
+#[stable(feature = "rust1", since = "1.0.0")]
impl<'a> Read for &'a [u8] {
fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
let amt = cmp::min(buf.len(), self.len());
}
}
+#[stable(feature = "rust1", since = "1.0.0")]
impl<'a> BufRead for &'a [u8] {
fn fill_buf(&mut self) -> io::Result<&[u8]> { Ok(*self) }
fn consume(&mut self, amt: usize) { *self = &self[amt..]; }
}
+#[stable(feature = "rust1", since = "1.0.0")]
impl<'a> Write for &'a mut [u8] {
fn write(&mut self, data: &[u8]) -> io::Result<usize> {
let amt = cmp::min(data.len(), self.len());
fn flush(&mut self) -> io::Result<()> { Ok(()) }
}
+#[stable(feature = "rust1", since = "1.0.0")]
impl Write for Vec<u8> {
fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
self.push_all(buf);
}
fn write_all(&mut self, buf: &[u8]) -> io::Result<()> {
- try!(self.write(buf));
+ self.push_all(buf);
Ok(())
}
/// Extension methods for all instances of `Read`, typically imported through
/// `std::io::prelude::*`.
+#[unstable(feature = "io", reason = "may merge into the Read trait")]
pub trait ReadExt: Read + Sized {
/// Create a "by reference" adaptor for this instance of `Read`.
///
/// The returned adaptor also implements `Read` and will simply borrow this
/// current reader.
+ #[stable(feature = "rust1", since = "1.0.0")]
fn by_ref(&mut self) -> &mut Self { self }
/// Transform this `Read` instance to an `Iterator` over its bytes.
/// R::Err>`. The yielded item is `Ok` if a byte was successfully read and
/// `Err` otherwise for I/O errors. EOF is mapped to returning `None` from
/// this iterator.
+ #[stable(feature = "rust1", since = "1.0.0")]
fn bytes(self) -> Bytes<Self> {
Bytes { inner: self }
}
///
/// Currently this adaptor will discard intermediate data read, and should
/// be avoided if this is not desired.
+ #[unstable(feature = "io", reason = "the semantics of a partial read/write \
+ of where errors happen is currently \
+ unclear and may change")]
fn chars(self) -> Chars<Self> {
Chars { inner: self }
}
/// The returned `Read` instance will first read all bytes from this object
/// until EOF is encountered. Afterwards the output is equivalent to the
/// output of `next`.
+ #[stable(feature = "rust1", since = "1.0.0")]
fn chain<R: Read>(self, next: R) -> Chain<Self, R> {
Chain { first: self, second: next, done_first: false }
}
/// `limit` bytes, after which it will always return EOF (`Ok(0)`). Any
/// read errors will not count towards the number of bytes read and future
/// calls to `read` may succeed.
+ #[stable(feature = "rust1", since = "1.0.0")]
fn take(self, limit: u64) -> Take<Self> {
Take { inner: self, limit: limit }
}
/// Whenever the returned `Read` instance is read it will write the read
/// data to `out`. The current semantics of this implementation imply that
/// a `write` error will not report how much data was initially read.
+ #[unstable(feature = "io", reason = "the semantics of a partial read/write \
+ of where errors happen is currently \
+ unclear and may change")]
fn tee<W: Write>(self, out: W) -> Tee<Self, W> {
Tee { reader: self, writer: out }
}
/// Extension methods for all instances of `Write`, typically imported through
/// `std::io::prelude::*`.
+#[unstable(feature = "io", reason = "may merge into the Read trait")]
pub trait WriteExt: Write + Sized {
/// Create a "by reference" adaptor for this instance of `Write`.
///
/// The returned adaptor also implements `Write` and will simply borrow this
/// current writer.
+ #[stable(feature = "rust1", since = "1.0.0")]
fn by_ref(&mut self) -> &mut Self { self }
/// Creates a new writer which will write all data to both this writer and
/// implementation do not precisely track where errors happen. For example
/// an error on the second call to `write` will not report that the first
/// call to `write` succeeded.
+ #[unstable(feature = "io", reason = "the semantics of a partial read/write \
+ of where errors happen is currently \
+ unclear and may change")]
fn broadcast<W: Write>(self, other: W) -> Broadcast<Self, W> {
Broadcast { first: self, second: other }
}
}
+#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Write> WriteExt for T {}
/// An object implementing `Seek` internally has some form of cursor which can
///
/// This function will yield errors whenever `read_until` would have also
/// yielded an error.
+ #[unstable(feature = "io", reason = "may be renamed to not conflict with \
+ SliceExt::split")]
fn split(self, byte: u8) -> Split<Self> {
Split { buf: self, delim: byte }
}
///
/// This function will yield errors whenever `read_string` would have also
/// yielded an error.
+ #[stable(feature = "rust1", since = "1.0.0")]
fn lines(self) -> Lines<Self> {
Lines { buf: self }
}
}
+#[stable(feature = "rust1", since = "1.0.0")]
impl<T: BufRead> BufReadExt for T {}
/// A `Write` adaptor which will write data to multiple locations.
/// Adaptor to chain together two instances of `Read`.
///
/// For more information, see `ReadExt::chain`.
+#[stable(feature = "rust1", since = "1.0.0")]
pub struct Chain<T, U> {
first: T,
second: U,
done_first: bool,
}
+#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Read, U: Read> Read for Chain<T, U> {
fn read(&mut self, buf: &mut [u8]) -> Result<usize> {
if !self.done_first {
/// Reader adaptor which limits the bytes read from an underlying reader.
///
/// For more information, see `ReadExt::take`.
+#[stable(feature = "rust1", since = "1.0.0")]
pub struct Take<T> {
inner: T,
limit: u64,
}
+#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Take<T> {
/// Returns the number of bytes that can be read before this instance will
/// return EOF.
///
/// This instance may reach EOF after reading fewer bytes than indicated by
/// this method if the underlying `Read` instance reaches EOF.
+ #[stable(feature = "rust1", since = "1.0.0")]
pub fn limit(&self) -> u64 { self.limit }
}
+#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Read> Read for Take<T> {
fn read(&mut self, buf: &mut [u8]) -> Result<usize> {
// Don't call into inner reader at all at EOF because it may still block
}
}
+#[stable(feature = "rust1", since = "1.0.0")]
impl<T: BufRead> BufRead for Take<T> {
fn fill_buf(&mut self) -> Result<&[u8]> {
let buf = try!(self.inner.fill_buf());
/// A bridge from implementations of `Read` to an `Iterator` of `u8`.
///
/// See `ReadExt::bytes` for more information.
+#[stable(feature = "rust1", since = "1.0.0")]
pub struct Bytes<R> {
inner: R,
}
+#[stable(feature = "rust1", since = "1.0.0")]
impl<R: Read> Iterator for Bytes<R> {
type Item = Result<u8>;
/// byte.
///
/// See `BufReadExt::lines` for more information.
+#[stable(feature = "rust1", since = "1.0.0")]
pub struct Lines<B> {
buf: B,
}
+#[stable(feature = "rust1", since = "1.0.0")]
impl<B: BufRead> Iterator for Lines<B> {
type Item = Result<String>;
struct R;
impl Read for R {
- fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
+ fn read(&mut self, _: &mut [u8]) -> io::Result<usize> {
Err(io::Error::new(io::ErrorKind::Other, "", None))
}
}
use prelude::v1::*;
-use io::{self, Read, Write, ErrorKind};
+use io::{self, Read, Write, ErrorKind, BufRead};
/// Copies the entire contents of a reader into a writer.
///
/// This function will return an error immediately if any call to `read` or
/// `write` returns an error. All instances of `ErrorKind::Interrupted` are
/// handled by this function and the underlying operation is retried.
+#[stable(feature = "rust1", since = "1.0.0")]
pub fn copy<R: Read, W: Write>(r: &mut R, w: &mut W) -> io::Result<u64> {
let mut buf = [0; super::DEFAULT_BUF_SIZE];
let mut written = 0;
}
/// A reader which is always at EOF.
+#[stable(feature = "rust1", since = "1.0.0")]
pub struct Empty { _priv: () }
/// Creates an instance of an empty reader.
///
/// All reads from the returned reader will return `Ok(0)`.
+#[stable(feature = "rust1", since = "1.0.0")]
pub fn empty() -> Empty { Empty { _priv: () } }
+#[stable(feature = "rust1", since = "1.0.0")]
impl Read for Empty {
fn read(&mut self, _buf: &mut [u8]) -> io::Result<usize> { Ok(0) }
}
+#[stable(feature = "rust1", since = "1.0.0")]
+impl BufRead for Empty {
+ fn fill_buf(&mut self) -> io::Result<&[u8]> { Ok(&[]) }
+ fn consume(&mut self, _n: usize) {}
+}
/// A reader which infinitely yields one byte.
+#[stable(feature = "rust1", since = "1.0.0")]
pub struct Repeat { byte: u8 }
/// Creates an instance of a reader that infinitely repeats one byte.
///
/// All reads from this reader will succeed by filling the specified buffer with
/// the given byte.
+#[stable(feature = "rust1", since = "1.0.0")]
pub fn repeat(byte: u8) -> Repeat { Repeat { byte: byte } }
+#[stable(feature = "rust1", since = "1.0.0")]
impl Read for Repeat {
fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
for slot in buf.iter_mut() {
}
/// A writer which will move data into the void.
+#[stable(feature = "rust1", since = "1.0.0")]
pub struct Sink { _priv: () }
/// Creates an instance of a writer which will successfully consume all data.
///
/// All calls to `write` on the returned instance will return `Ok(buf.len())`
/// and the contents of the buffer will not be inspected.
+#[stable(feature = "rust1", since = "1.0.0")]
pub fn sink() -> Sink { Sink { _priv: () } }
+#[stable(feature = "rust1", since = "1.0.0")]
impl Write for Sink {
fn write(&mut self, buf: &[u8]) -> io::Result<usize> { Ok(buf.len()) }
fn flush(&mut self) -> io::Result<()> { Ok(()) }
#![feature(hash)]
#![feature(int_uint)]
#![feature(unique)]
-#![cfg_attr(test, feature(test, rustc_private, env))]
+#![feature(allow_internal_unstable)]
+#![cfg_attr(test, feature(test, rustc_private))]
// Don't link to std. We are std.
#![feature(no_std)]
#[test]
fn to_socket_addr_ipaddr_u16() {
let a = IpAddr::new_v4(77, 88, 21, 11);
- let p = 12345u16;
+ let p = 12345;
let e = SocketAddr::new(a, p);
assert_eq!(Ok(vec![e]), tsa((a, p)));
}
#[test]
fn to_socket_addr_str_u16() {
let a = SocketAddr::new(IpAddr::new_v4(77, 88, 21, 11), 24352);
- assert_eq!(Ok(vec![a]), tsa(("77.88.21.11", 24352u16)));
+ assert_eq!(Ok(vec![a]), tsa(("77.88.21.11", 24352)));
let a = SocketAddr::new(IpAddr::new_v6(0x2a02, 0x6b8, 0, 1, 0, 0, 0, 1), 53);
assert_eq!(Ok(vec![a]), tsa(("2a02:6b8:0:1::1", 53)));
let a = SocketAddr::new(IpAddr::new_v4(127, 0, 0, 1), 23924);
- assert!(tsa(("localhost", 23924u16)).unwrap().contains(&a));
+ assert!(tsa(("localhost", 23924)).unwrap().contains(&a));
}
#[test]
}
fn read_number_impl(&mut self, radix: u8, max_digits: u32, upto: u32) -> Option<u32> {
- let mut r = 0u32;
+ let mut r = 0;
let mut digit_count = 0;
loop {
match self.read_digit(radix) {
}
fn read_ipv4_addr_impl(&mut self) -> Option<Ipv4Addr> {
- let mut bs = [0u8; 4];
+ let mut bs = [0; 4];
let mut i = 0;
while i < 4 {
if i != 0 && self.read_given_char('.').is_none() {
fn read_ipv6_addr_impl(&mut self) -> Option<Ipv6Addr> {
fn ipv6_addr_from_head_tail(head: &[u16], tail: &[u16]) -> Ipv6Addr {
assert!(head.len() + tail.len() <= 8);
- let mut gs = [0u16; 8];
+ let mut gs = [0; 8];
gs.clone_from_slice(head);
gs[(8 - tail.len()) .. 8].clone_from_slice(tail);
Ipv6Addr::new(gs[0], gs[1], gs[2], gs[3], gs[4], gs[5], gs[6], gs[7])
(i, false)
}
- let mut head = [0u16; 8];
+ let mut head = [0; 8];
let (head_size, head_ipv4) = read_groups(self, &mut head, 8);
if head_size == 8 {
return None;
}
- let mut tail = [0u16; 8];
+ let mut tail = [0; 8];
let (tail_size, _) = read_groups(self, &mut tail, 8 - head_size);
Some(ipv6_addr_from_head_tail(&head[..head_size], &tail[..tail_size]))
}
#[test]
fn test_integer_decode() {
- assert_eq!(3.14159265359f32.integer_decode(), (13176795u64, -22i16, 1i8));
- assert_eq!((-8573.5918555f32).integer_decode(), (8779358u64, -10i16, -1i8));
- assert_eq!(2f32.powf(100.0).integer_decode(), (8388608u64, 77i16, 1i8));
- assert_eq!(0f32.integer_decode(), (0u64, -150i16, 1i8));
- assert_eq!((-0f32).integer_decode(), (0u64, -150i16, -1i8));
- assert_eq!(INFINITY.integer_decode(), (8388608u64, 105i16, 1i8));
- assert_eq!(NEG_INFINITY.integer_decode(), (8388608u64, 105i16, -1i8));
- assert_eq!(NAN.integer_decode(), (12582912u64, 105i16, 1i8));
+ assert_eq!(3.14159265359f32.integer_decode(), (13176795, -22, 1));
+ assert_eq!((-8573.5918555f32).integer_decode(), (8779358, -10, -1));
+ assert_eq!(2f32.powf(100.0).integer_decode(), (8388608, 77, 1));
+ assert_eq!(0f32.integer_decode(), (0, -150, 1));
+ assert_eq!((-0f32).integer_decode(), (0, -150, -1));
+ assert_eq!(INFINITY.integer_decode(), (8388608, 105, 1));
+ assert_eq!(NEG_INFINITY.integer_decode(), (8388608, 105, -1));
+ assert_eq!(NAN.integer_decode(), (12582912, 105, 1));
}
#[test]
#[test]
fn test_integer_decode() {
- assert_eq!(3.14159265359f64.integer_decode(), (7074237752028906u64, -51i16, 1i8));
- assert_eq!((-8573.5918555f64).integer_decode(), (4713381968463931u64, -39i16, -1i8));
- assert_eq!(2f64.powf(100.0).integer_decode(), (4503599627370496u64, 48i16, 1i8));
- assert_eq!(0f64.integer_decode(), (0u64, -1075i16, 1i8));
- assert_eq!((-0f64).integer_decode(), (0u64, -1075i16, -1i8));
- assert_eq!(INFINITY.integer_decode(), (4503599627370496u64, 972i16, 1i8));
+ assert_eq!(3.14159265359f64.integer_decode(), (7074237752028906, -51, 1));
+ assert_eq!((-8573.5918555f64).integer_decode(), (4713381968463931, -39, -1));
+ assert_eq!(2f64.powf(100.0).integer_decode(), (4503599627370496, 48, 1));
+ assert_eq!(0f64.integer_decode(), (0, -1075, 1));
+ assert_eq!((-0f64).integer_decode(), (0, -1075, -1));
+ assert_eq!(INFINITY.integer_decode(), (4503599627370496, 972, 1));
assert_eq!(NEG_INFINITY.integer_decode(), (4503599627370496, 972, -1));
- assert_eq!(NAN.integer_decode(), (6755399441055744u64, 972i16, 1i8));
+ assert_eq!(NAN.integer_decode(), (6755399441055744, 972, 1));
}
#[test]
///
/// let num = 2.0f32;
///
- /// // (8388608u64, -22i16, 1i8)
+ /// // (8388608, -22, 1)
/// let (mantissa, exponent, sign) = num.integer_decode();
/// let sign_f = sign as f32;
/// let mantissa_f = mantissa as f32;
#[test]
fn test_uint_to_str_overflow() {
- let mut u8_val: u8 = 255_u8;
+ let mut u8_val: u8 = 255;
assert_eq!(u8_val.to_string(), "255");
u8_val = u8_val.wrapping_add(1);
assert_eq!(u8_val.to_string(), "0");
- let mut u16_val: u16 = 65_535_u16;
+ let mut u16_val: u16 = 65_535;
assert_eq!(u16_val.to_string(), "65535");
u16_val = u16_val.wrapping_add(1);
assert_eq!(u16_val.to_string(), "0");
- let mut u32_val: u32 = 4_294_967_295_u32;
+ let mut u32_val: u32 = 4_294_967_295;
assert_eq!(u32_val.to_string(), "4294967295");
u32_val = u32_val.wrapping_add(1);
assert_eq!(u32_val.to_string(), "0");
- let mut u64_val: u64 = 18_446_744_073_709_551_615_u64;
+ let mut u64_val: u64 = 18_446_744_073_709_551_615;
assert_eq!(u64_val.to_string(), "18446744073709551615");
u64_val = u64_val.wrapping_add(1);
#[test]
fn test_uint_from_str_overflow() {
- let mut u8_val: u8 = 255_u8;
+ let mut u8_val: u8 = 255;
assert_eq!(from_str::<u8>("255"), Some(u8_val));
assert_eq!(from_str::<u8>("256"), None);
assert_eq!(from_str::<u8>("0"), Some(u8_val));
assert_eq!(from_str::<u8>("-1"), None);
- let mut u16_val: u16 = 65_535_u16;
+ let mut u16_val: u16 = 65_535;
assert_eq!(from_str::<u16>("65535"), Some(u16_val));
assert_eq!(from_str::<u16>("65536"), None);
assert_eq!(from_str::<u16>("0"), Some(u16_val));
assert_eq!(from_str::<u16>("-1"), None);
- let mut u32_val: u32 = 4_294_967_295_u32;
+ let mut u32_val: u32 = 4_294_967_295;
assert_eq!(from_str::<u32>("4294967295"), Some(u32_val));
assert_eq!(from_str::<u32>("4294967296"), None);
assert_eq!(from_str::<u32>("0"), Some(u32_val));
assert_eq!(from_str::<u32>("-1"), None);
- let mut u64_val: u64 = 18_446_744_073_709_551_615_u64;
+ let mut u64_val: u64 = 18_446_744_073_709_551_615;
assert_eq!(from_str::<u64>("18446744073709551615"), Some(u64_val));
assert_eq!(from_str::<u64>("18446744073709551616"), None);
// This is just for integral types, the largest of which is a u64. The
// smallest base that we can have is 2, so the most number of digits we're
// ever going to have is 64
- let mut buf = [0u8; 64];
+ let mut buf = [0; 64];
let mut cur = 0;
// Loop at least once to make sure at least a `0` gets emitted.
let radix_gen: T = num::cast(radix as int).unwrap();
let (num, exp) = match exp_format {
- ExpNone => (num, 0i32),
+ ExpNone => (num, 0),
ExpDec | ExpBin => {
if num == _0 {
- (num, 0i32)
+ (num, 0)
} else {
let (exp, exp_base) = match exp_format {
ExpDec => (num.abs().log10().floor(), num::cast::<f64, T>(10.0f64).unwrap()),
#[test]
fn test_int_to_str_overflow() {
- let mut i8_val: i8 = 127_i8;
+ let mut i8_val: i8 = 127;
assert_eq!(i8_val.to_string(), "127");
i8_val = i8_val.wrapping_add(1);
assert_eq!(i8_val.to_string(), "-128");
- let mut i16_val: i16 = 32_767_i16;
+ let mut i16_val: i16 = 32_767;
assert_eq!(i16_val.to_string(), "32767");
i16_val = i16_val.wrapping_add(1);
assert_eq!(i16_val.to_string(), "-32768");
- let mut i32_val: i32 = 2_147_483_647_i32;
+ let mut i32_val: i32 = 2_147_483_647;
assert_eq!(i32_val.to_string(), "2147483647");
i32_val = i32_val.wrapping_add(1);
assert_eq!(i32_val.to_string(), "-2147483648");
- let mut i64_val: i64 = 9_223_372_036_854_775_807_i64;
+ let mut i64_val: i64 = 9_223_372_036_854_775_807;
assert_eq!(i64_val.to_string(), "9223372036854775807");
i64_val = i64_val.wrapping_add(1);
#[test]
fn read_char_buffered() {
- let buf = [195u8, 159u8];
+ let buf = [195, 159];
let mut reader = BufferedReader::with_capacity(1, &buf[..]);
assert_eq!(reader.read_char(), Ok('ß'));
}
#[test]
fn test_chars() {
- let buf = [195u8, 159u8, b'a'];
+ let buf = [195, 159, b'a'];
let mut reader = BufferedReader::with_capacity(1, &buf[..]);
let mut it = reader.chars();
assert_eq!(it.next(), Some(Ok('ß')));
/// # drop(tx);
/// let mut reader = ChanReader::new(rx);
///
-/// let mut buf = [0u8; 100];
+/// let mut buf = [0; 100];
/// match reader.read(&mut buf) {
/// Ok(nread) => println!("Read {} bytes", nread),
/// Err(e) => println!("read error: {}", e),
fn test_rx_reader() {
let (tx, rx) = channel();
thread::spawn(move|| {
- tx.send(vec![1u8, 2u8]).unwrap();
+ tx.send(vec![1, 2]).unwrap();
tx.send(vec![]).unwrap();
- tx.send(vec![3u8, 4u8]).unwrap();
- tx.send(vec![5u8, 6u8]).unwrap();
- tx.send(vec![7u8, 8u8]).unwrap();
+ tx.send(vec![3, 4]).unwrap();
+ tx.send(vec![5, 6]).unwrap();
+ tx.send(vec![7, 8]).unwrap();
});
let mut reader = ChanReader::new(rx);
- let mut buf = [0u8; 3];
+ let mut buf = [0; 3];
assert_eq!(Ok(0), reader.read(&mut []));
let mut writer = ChanWriter::new(tx);
writer.write_be_u32(42).unwrap();
- let wanted = vec![0u8, 0u8, 0u8, 42u8];
+ let wanted = vec![0, 0, 0, 42];
let got = thread::scoped(move|| { rx.recv().unwrap() }).join();
assert_eq!(wanted, got);
let mut i = size;
let mut n = n;
while i > 0 {
- bytes.push((n & 255_u64) as u8);
+ bytes.push((n & 255) as u8);
n >>= 8;
i -= 1;
}
panic!("index out of bounds");
}
- let mut buf = [0u8; 8];
+ let mut buf = [0; 8];
unsafe {
let ptr = data.as_ptr().offset(start as int);
let out = buf.as_mut_ptr();
({
use super::u64_from_be_bytes;
- let data = (0u8..$stride*100+$start_index).collect::<Vec<_>>();
- let mut sum = 0u64;
+ let data = (0..$stride*100+$start_index).collect::<Vec<_>>();
+ let mut sum = 0;
$b.iter(|| {
let mut i = $start_index;
while i < data.len() {
/// attempted against it for which its underlying file descriptor was not
/// configured at creation time, via the `FileAccess` parameter to
/// `File::open_mode()`.
+#[deprecated(since = "1.0.0", reason = "replaced with std::fs::File")]
+#[unstable(feature = "old_io")]
pub struct File {
fd: fs_imp::FileDesc,
path: Path,
}
}
+#[deprecated(since = "1.0.0", reason = "replaced with std::fs")]
+#[unstable(feature = "old_io")]
impl File {
/// Open a file at `path` in the mode specified by the `mode` and `access`
/// arguments
/// * Attempting to open a file with a `FileAccess` that the user lacks
/// permissions for
/// * Filesystem-level errors (full disk, etc)
+ #[deprecated(since = "1.0.0", reason = "replaced with std::fs::OpenOptions")]
+ #[unstable(feature = "old_io")]
pub fn open_mode(path: &Path,
mode: FileMode,
access: FileAccess) -> IoResult<File> {
///
/// let contents = File::open(&Path::new("foo.txt")).read_to_end();
/// ```
+ #[deprecated(since = "1.0.0", reason = "replaced with std::fs::File::open")]
+ #[unstable(feature = "old_io")]
pub fn open(path: &Path) -> IoResult<File> {
File::open_mode(path, Open, Read)
}
/// # drop(f);
/// # ::std::old_io::fs::unlink(&Path::new("foo.txt"));
/// ```
+ #[deprecated(since = "1.0.0", reason = "replaced with std::fs::File::create")]
+ #[unstable(feature = "old_io")]
pub fn create(path: &Path) -> IoResult<File> {
File::open_mode(path, Truncate, Write)
.update_desc("couldn't create file")
}
/// Returns the original path that was used to open this file.
+ #[deprecated(since = "1.0.0", reason = "replaced with std::fs")]
+ #[unstable(feature = "old_io")]
pub fn path<'a>(&'a self) -> &'a Path {
&self.path
}
/// Synchronizes all modifications to this file to its permanent storage
/// device. This will flush any internal buffers necessary to perform this
/// operation.
+ #[deprecated(since = "1.0.0", reason = "replaced with std::fs")]
+ #[unstable(feature = "old_io")]
pub fn fsync(&mut self) -> IoResult<()> {
self.fd.fsync()
.update_err("couldn't fsync file",
/// file metadata to the filesystem. This is intended for use cases that
/// must synchronize content, but don't need the metadata on disk. The goal
/// of this method is to reduce disk operations.
+ #[deprecated(since = "1.0.0", reason = "replaced with std::fs")]
+ #[unstable(feature = "old_io")]
pub fn datasync(&mut self) -> IoResult<()> {
self.fd.datasync()
.update_err("couldn't datasync file",
/// be shrunk. If it is greater than the current file's size, then the file
/// will be extended to `size` and have all of the intermediate data filled
/// in with 0s.
+ #[deprecated(since = "1.0.0", reason = "replaced with std::fs")]
+ #[unstable(feature = "old_io")]
pub fn truncate(&mut self, size: i64) -> IoResult<()> {
self.fd.truncate(size)
.update_err("couldn't truncate file", |e|
/// until you have attempted to read past the end of the file, so if
/// you've read _exactly_ the number of bytes in the file, this will
/// return `false`, not `true`.
+ #[deprecated(since = "1.0.0", reason = "replaced with std::fs")]
+ #[unstable(feature = "old_io")]
pub fn eof(&self) -> bool {
self.last_nread == 0
}
/// Queries information about the underlying file.
+ #[deprecated(since = "1.0.0", reason = "replaced with std::fs")]
+ #[unstable(feature = "old_io")]
pub fn stat(&self) -> IoResult<FileStat> {
self.fd.fstat()
.update_err("couldn't fstat file", |e|
/// This function will return an error if `path` points to a directory, if the
/// user lacks permissions to remove the file, or if some other filesystem-level
/// error occurs.
+#[deprecated(since = "1.0.0", reason = "replaced with std::fs::remove_file")]
+#[unstable(feature = "old_io")]
pub fn unlink(path: &Path) -> IoResult<()> {
fs_imp::unlink(path)
.update_err("couldn't unlink path", |e|
/// This function will return an error if the user lacks the requisite permissions
/// to perform a `stat` call on the given `path` or if there is no entry in the
/// filesystem at the provided path.
+#[deprecated(since = "1.0.0", reason = "replaced with std::fs::metadata")]
+#[unstable(feature = "old_io")]
pub fn stat(path: &Path) -> IoResult<FileStat> {
fs_imp::stat(path)
.update_err("couldn't stat path", |e|
/// # Error
///
/// See `stat`
+#[unstable(feature = "old_fs")]
pub fn lstat(path: &Path) -> IoResult<FileStat> {
fs_imp::lstat(path)
.update_err("couldn't lstat path", |e|
/// This function will return an error if the provided `from` doesn't exist, if
/// the process lacks permissions to view the contents, or if some other
/// intermittent I/O error occurs.
+#[deprecated(since = "1.0.0", reason = "replaced with std::fs::rename")]
+#[unstable(feature = "old_io")]
pub fn rename(from: &Path, to: &Path) -> IoResult<()> {
fs_imp::rename(from, to)
.update_err("couldn't rename path", |e|
/// Note that this copy is not atomic in that once the destination is
/// ensured to not exist, there is nothing preventing the destination from
/// being created and then destroyed by this operation.
+#[deprecated(since = "1.0.0", reason = "replaced with std::fs::copy")]
+#[unstable(feature = "old_io")]
pub fn copy(from: &Path, to: &Path) -> IoResult<()> {
fn update_err<T>(result: IoResult<T>, from: &Path, to: &Path) -> IoResult<T> {
result.update_err("couldn't copy path", |e| {
/// This function will return an error if the provided `path` doesn't exist, if
/// the process lacks permissions to change the attributes of the file, or if
/// some other I/O error is encountered.
+#[deprecated(since = "1.0.0", reason = "replaced with std::fs::set_permissions")]
+#[unstable(feature = "old_io")]
pub fn chmod(path: &Path, mode: old_io::FilePermission) -> IoResult<()> {
fs_imp::chmod(path, mode.bits() as uint)
.update_err("couldn't chmod path", |e|
}
/// Change the user and group owners of a file at the specified path.
+#[unstable(feature = "old_fs")]
pub fn chown(path: &Path, uid: int, gid: int) -> IoResult<()> {
fs_imp::chown(path, uid, gid)
.update_err("couldn't chown path", |e|
/// Creates a new hard link on the filesystem. The `dst` path will be a
/// link pointing to the `src` path. Note that systems often require these
/// two paths to both be located on the same filesystem.
+#[deprecated(since = "1.0.0", reason = "replaced with std::fs::hard_link")]
+#[unstable(feature = "old_io")]
pub fn link(src: &Path, dst: &Path) -> IoResult<()> {
fs_imp::link(src, dst)
.update_err("couldn't link path", |e|
/// Creates a new symbolic link on the filesystem. The `dst` path will be a
/// symlink pointing to the `src` path.
+#[deprecated(since = "1.0.0", reason = "replaced with std::fs::soft_link")]
+#[unstable(feature = "old_io")]
pub fn symlink(src: &Path, dst: &Path) -> IoResult<()> {
fs_imp::symlink(src, dst)
.update_err("couldn't symlink path", |e|
///
/// This function will return an error on failure. Failure conditions include
/// reading a file that does not exist or reading a file that is not a symlink.
+#[deprecated(since = "1.0.0", reason = "replaced with std::fs::read_link")]
+#[unstable(feature = "old_io")]
pub fn readlink(path: &Path) -> IoResult<Path> {
fs_imp::readlink(path)
.update_err("couldn't resolve symlink for path", |e|
///
/// This function will return an error if the user lacks permissions to make a
/// new directory at the provided `path`, or if the directory already exists.
+#[unstable(feature = "old_fs")]
pub fn mkdir(path: &Path, mode: FilePermission) -> IoResult<()> {
fs_imp::mkdir(path, mode.bits() as uint)
.update_err("couldn't create directory", |e|
///
/// This function will return an error if the user lacks permissions to remove
/// the directory at the provided `path`, or if the directory isn't empty.
+#[deprecated(since = "1.0.0", reason = "replaced with std::fs::remove_dir")]
+#[unstable(feature = "old_io")]
pub fn rmdir(path: &Path) -> IoResult<()> {
fs_imp::rmdir(path)
.update_err("couldn't remove directory", |e|
/// This function will return an error if the provided `path` doesn't exist, if
/// the process lacks permissions to view the contents or if the `path` points
/// at a non-directory file
+#[deprecated(since = "1.0.0", reason = "replaced with std::fs::read_dir")]
+#[unstable(feature = "old_io")]
pub fn readdir(path: &Path) -> IoResult<Vec<Path>> {
fs_imp::readdir(path)
.update_err("couldn't read directory",
/// rooted at `path`. The path given will not be iterated over, and this will
/// perform iteration in some top-down order. The contents of unreadable
/// subdirectories are ignored.
+#[deprecated(since = "1.0.0", reason = "replaced with std::fs::walk_dir")]
+#[unstable(feature = "old_io")]
pub fn walk_dir(path: &Path) -> IoResult<Directories> {
Ok(Directories {
stack: try!(readdir(path).update_err("couldn't walk directory",
/// An iterator that walks over a directory
#[derive(Clone)]
+#[deprecated(since = "1.0.0", reason = "replaced with std::fs::ReadDir")]
+#[unstable(feature = "old_io")]
pub struct Directories {
stack: Vec<Path>,
}
/// # Error
///
/// See `fs::mkdir`.
+#[unstable(feature = "old_fs")]
pub fn mkdir_recursive(path: &Path, mode: FilePermission) -> IoResult<()> {
// tjc: if directory exists but with different permissions,
// should we return false?
/// # Error
///
/// See `file::unlink` and `fs::readdir`
+#[deprecated(since = "1.0.0", reason = "replaced with std::fs::remove_dir_all")]
+#[unstable(feature = "old_io")]
pub fn rmdir_recursive(path: &Path) -> IoResult<()> {
let mut rm_stack = Vec::new();
rm_stack.push(path.clone());
/// `atime` and its modification time set to `mtime`. The times specified should
/// be in milliseconds.
// FIXME(#10301) these arguments should not be u64
+#[deprecated(since = "1.0.0", reason = "replaced with std::fs::set_file_times")]
+#[unstable(feature = "old_io")]
pub fn change_file_times(path: &Path, atime: u64, mtime: u64) -> IoResult<()> {
fs_imp::utime(path, atime, mtime)
.update_err("couldn't change_file_times", |e|
}
/// Utility methods for paths.
+#[deprecated(since = "1.0.0", reason = "replaced with std::fs::PathExt")]
+#[unstable(feature = "old_io")]
pub trait PathExtensions {
/// Get information on the file, directory, etc at this path.
///
check!(w.write(msg));
}
let files = check!(readdir(dir));
- let mut mem = [0u8; 4];
+ let mut mem = [0; 4];
for f in &files {
{
let n = f.filestem_str();
check!(File::create(&dir2.join("14")));
let mut files = check!(walk_dir(dir));
- let mut cur = [0u8; 2];
+ let mut cur = [0; 2];
for f in files {
let stem = f.filestem_str().unwrap();
let root = stem.as_bytes()[0] - b'0';
fn read_le_uint_n(&mut self, nbytes: uint) -> IoResult<u64> {
assert!(nbytes > 0 && nbytes <= 8);
- let mut val = 0u64;
+ let mut val = 0;
let mut pos = 0;
let mut i = nbytes;
while i > 0 {
fn read_be_uint_n(&mut self, nbytes: uint) -> IoResult<u64> {
assert!(nbytes > 0 && nbytes <= 8);
- let mut val = 0u64;
+ let mut val = 0;
let mut i = nbytes;
while i > 0 {
i -= 1;
/// Write a single char, encoded as UTF-8.
#[inline]
fn write_char(&mut self, c: char) -> IoResult<()> {
- let mut buf = [0u8; 4];
+ let mut buf = [0; 4];
let n = c.encode_utf8(&mut buf).unwrap_or(0);
self.write_all(&buf[..n])
}
fn test_read_at_least() {
let mut r = BadReader::new(MemReader::new(b"hello, world!".to_vec()),
vec![GoodBehavior(usize::MAX)]);
- let buf = &mut [0u8; 5];
+ let buf = &mut [0; 5];
assert!(r.read_at_least(1, buf).unwrap() >= 1);
assert!(r.read_exact(5).unwrap().len() == 5); // read_exact uses read_at_least
assert!(r.read_at_least(0, buf).is_ok());
}
fn read_number_impl(&mut self, radix: u8, max_digits: u32, upto: u32) -> Option<u32> {
- let mut r = 0u32;
+ let mut r = 0;
let mut digit_count = 0;
loop {
match self.read_digit(radix) {
}
fn read_ipv4_addr_impl(&mut self) -> Option<IpAddr> {
- let mut bs = [0u8; 4];
+ let mut bs = [0; 4];
let mut i = 0;
while i < 4 {
if i != 0 && self.read_given_char('.').is_none() {
fn read_ipv6_addr_impl(&mut self) -> Option<IpAddr> {
fn ipv6_addr_from_head_tail(head: &[u16], tail: &[u16]) -> IpAddr {
assert!(head.len() + tail.len() <= 8);
- let mut gs = [0u16; 8];
+ let mut gs = [0; 8];
gs.clone_from_slice(head);
gs[(8 - tail.len()) .. 8].clone_from_slice(tail);
Ipv6Addr(gs[0], gs[1], gs[2], gs[3], gs[4], gs[5], gs[6], gs[7])
(i, false)
}
- let mut head = [0u16; 8];
+ let mut head = [0; 8];
let (head_size, head_ipv4) = read_groups(self, &mut head, 8);
if head_size == 8 {
return None;
}
- let mut tail = [0u16; 8];
+ let mut tail = [0; 8];
let (tail_size, _) = read_groups(self, &mut tail, 8 - head_size);
Some(ipv6_addr_from_head_tail(&head[..head_size], &tail[..tail_size]))
}
/// // The following lines are equivalent modulo possible "localhost" name resolution
/// // differences
/// let tcp_s = TcpStream::connect(SocketAddr { ip: Ipv4Addr(127, 0, 0, 1), port: 12345 });
-/// let tcp_s = TcpStream::connect((Ipv4Addr(127, 0, 0, 1), 12345u16));
-/// let tcp_s = TcpStream::connect(("127.0.0.1", 12345u16));
-/// let tcp_s = TcpStream::connect(("localhost", 12345u16));
+/// let tcp_s = TcpStream::connect((Ipv4Addr(127, 0, 0, 1), 12345));
+/// let tcp_s = TcpStream::connect(("127.0.0.1", 12345));
+/// let tcp_s = TcpStream::connect(("localhost", 12345));
/// let tcp_s = TcpStream::connect("127.0.0.1:12345");
/// let tcp_s = TcpStream::connect("localhost:12345");
///
/// // TcpListener::bind(), UdpSocket::bind() and UdpSocket::send_to() behave similarly
/// let tcp_l = TcpListener::bind("localhost:12345");
///
-/// let mut udp_s = UdpSocket::bind(("127.0.0.1", 23451u16)).unwrap();
-/// udp_s.send_to([7u8, 7u8, 7u8].as_slice(), (Ipv4Addr(127, 0, 0, 1), 23451u16));
+/// let mut udp_s = UdpSocket::bind(("127.0.0.1", 23451)).unwrap();
+/// udp_s.send_to([7, 7, 7].as_slice(), (Ipv4Addr(127, 0, 0, 1), 23451));
/// }
/// ```
pub trait ToSocketAddr {
#[test]
fn to_socket_addr_ipaddr_u16() {
let a = Ipv4Addr(77, 88, 21, 11);
- let p = 12345u16;
+ let p = 12345;
let e = SocketAddr { ip: a, port: p };
assert_eq!(Ok(e), (a, p).to_socket_addr());
assert_eq!(Ok(vec![e]), (a, p).to_socket_addr_all());
#[test]
fn to_socket_addr_str_u16() {
let a = SocketAddr { ip: Ipv4Addr(77, 88, 21, 11), port: 24352 };
- assert_eq!(Ok(a), ("77.88.21.11", 24352u16).to_socket_addr());
- assert_eq!(Ok(vec![a]), ("77.88.21.11", 24352u16).to_socket_addr_all());
+ assert_eq!(Ok(a), ("77.88.21.11", 24352).to_socket_addr());
+ assert_eq!(Ok(vec![a]), ("77.88.21.11", 24352).to_socket_addr_all());
let a = SocketAddr { ip: Ipv6Addr(0x2a02, 0x6b8, 0, 1, 0, 0, 0, 1), port: 53 };
assert_eq!(Ok(a), ("2a02:6b8:0:1::1", 53).to_socket_addr());
assert_eq!(Ok(vec![a]), ("2a02:6b8:0:1::1", 53).to_socket_addr_all());
let a = SocketAddr { ip: Ipv4Addr(127, 0, 0, 1), port: 23924 };
- assert!(("localhost", 23924u16).to_socket_addr_all().unwrap().contains(&a));
+ assert!(("localhost", 23924).to_socket_addr_all().unwrap().contains(&a));
}
#[test]
tx.send(TcpStream::connect(addr).unwrap()).unwrap();
});
let _l = rx.recv().unwrap();
- for i in 0i32..1001 {
+ for i in 0..1001 {
match a.accept() {
Ok(..) => break,
Err(ref e) if e.kind == TimedOut => {}
assert_eq!(s.read(&mut [0]).err().unwrap().kind, TimedOut);
s.set_timeout(Some(20));
- for i in 0i32..1001 {
+ for i in 0..1001 {
match s.write(&[0; 128 * 1024]) {
Ok(()) | Err(IoError { kind: ShortWrite(..), .. }) => {},
Err(IoError { kind: TimedOut, .. }) => break,
let mut s = a.accept().unwrap();
s.set_write_timeout(Some(20));
- for i in 0i32..1001 {
+ for i in 0..1001 {
match s.write(&[0; 128 * 1024]) {
Ok(()) | Err(IoError { kind: ShortWrite(..), .. }) => {},
Err(IoError { kind: TimedOut, .. }) => break,
*/
fn base_port() -> u16 {
- let base = 9600u16;
- let range = 1000u16;
+ let base = 9600;
+ let range = 1000;
let bases = [
("32-opt", base + range * 1),
//! Utility implementations of Reader and Writer
+#![allow(deprecated)]
+
use prelude::v1::*;
use cmp;
use old_io;
/// Wraps a `Reader`, limiting the number of bytes that can be read from it.
#[derive(Debug)]
+#[deprecated(since = "1.0.0", reason = "use std::io::Take")]
+#[unstable(feature = "old_io")]
pub struct LimitReader<R> {
limit: uint,
inner: R
}
+#[deprecated(since = "1.0.0", reason = "use std::io::Take")]
+#[unstable(feature = "old_io")]
impl<R: Reader> LimitReader<R> {
/// Creates a new `LimitReader`
+ #[deprecated(since = "1.0.0", reason = "use std::io's take method instead")]
+ #[unstable(feature = "old_io")]
pub fn new(r: R, limit: uint) -> LimitReader<R> {
LimitReader { limit: limit, inner: r }
}
pub fn limit(&self) -> uint { self.limit }
}
+#[deprecated(since = "1.0.0", reason = "use std::io's take method instead")]
+#[unstable(feature = "old_io")]
impl<R: Reader> Reader for LimitReader<R> {
fn read(&mut self, buf: &mut [u8]) -> old_io::IoResult<uint> {
if self.limit == 0 {
}
}
+#[deprecated(since = "1.0.0", reason = "use std::io's take method instead")]
+#[unstable(feature = "old_io")]
impl<R: Buffer> Buffer for LimitReader<R> {
fn fill_buf<'a>(&'a mut self) -> old_io::IoResult<&'a [u8]> {
let amt = try!(self.inner.fill_buf());
/// A `Writer` which ignores bytes written to it, like /dev/null.
#[derive(Copy, Debug)]
+#[deprecated(since = "1.0.0", reason = "use std::io::sink() instead")]
+#[unstable(feature = "old_io")]
pub struct NullWriter;
+#[deprecated(since = "1.0.0", reason = "use std::io::sink() instead")]
+#[unstable(feature = "old_io")]
impl Writer for NullWriter {
#[inline]
fn write_all(&mut self, _buf: &[u8]) -> old_io::IoResult<()> { Ok(()) }
/// A `Reader` which returns an infinite stream of 0 bytes, like /dev/zero.
#[derive(Copy, Debug)]
+#[deprecated(since = "1.0.0", reason = "use std::io::repeat(0) instead")]
+#[unstable(feature = "old_io")]
pub struct ZeroReader;
+#[deprecated(since = "1.0.0", reason = "use std::io::repeat(0) instead")]
+#[unstable(feature = "old_io")]
impl Reader for ZeroReader {
#[inline]
fn read(&mut self, buf: &mut [u8]) -> old_io::IoResult<uint> {
}
}
+#[deprecated(since = "1.0.0", reason = "use std::io::repeat(0) instead")]
+#[unstable(feature = "old_io")]
impl Buffer for ZeroReader {
fn fill_buf<'a>(&'a mut self) -> old_io::IoResult<&'a [u8]> {
static DATA: [u8; 64] = [0; 64];
/// A `Reader` which is always at EOF, like /dev/null.
#[derive(Copy, Debug)]
+#[deprecated(since = "1.0.0", reason = "use std::io::empty() instead")]
+#[unstable(feature = "old_io")]
pub struct NullReader;
+#[deprecated(since = "1.0.0", reason = "use std::io::empty() instead")]
+#[unstable(feature = "old_io")]
impl Reader for NullReader {
#[inline]
fn read(&mut self, _buf: &mut [u8]) -> old_io::IoResult<uint> {
}
}
+#[deprecated(since = "1.0.0", reason = "use std::io::empty() instead")]
+#[unstable(feature = "old_io")]
impl Buffer for NullReader {
fn fill_buf<'a>(&'a mut self) -> old_io::IoResult<&'a [u8]> {
Err(old_io::standard_error(old_io::EndOfFile))
/// The `Writer`s are delegated to in order. If any `Writer` returns an error,
/// that error is returned immediately and remaining `Writer`s are not called.
#[derive(Debug)]
+#[deprecated(since = "1.0.0", reason = "use std::io::Broadcast instead")]
+#[unstable(feature = "old_io")]
pub struct MultiWriter<W> {
writers: Vec<W>
}
impl<W> MultiWriter<W> where W: Writer {
/// Creates a new `MultiWriter`
+ #[deprecated(since = "1.0.0", reason = "use std::io's broadcast method instead")]
+ #[unstable(feature = "old_io")]
pub fn new(writers: Vec<W>) -> MultiWriter<W> {
MultiWriter { writers: writers }
}
}
+#[deprecated(since = "1.0.0", reason = "use std::io::Broadcast instead")]
+#[unstable(feature = "old_io")]
impl<W> Writer for MultiWriter<W> where W: Writer {
#[inline]
fn write_all(&mut self, buf: &[u8]) -> old_io::IoResult<()> {
/// A `Reader` which chains input from multiple `Reader`s, reading each to
/// completion before moving onto the next.
#[derive(Clone, Debug)]
+#[deprecated(since = "1.0.0", reason = "use std::io::Chain instead")]
+#[unstable(feature = "old_io")]
pub struct ChainedReader<I, R> {
readers: I,
cur_reader: Option<R>,
impl<R: Reader, I: Iterator<Item=R>> ChainedReader<I, R> {
/// Creates a new `ChainedReader`
+ #[deprecated(since = "1.0.0", reason = "use std::io's chain method instead")]
+ #[unstable(feature = "old_io")]
pub fn new(mut readers: I) -> ChainedReader<I, R> {
let r = readers.next();
ChainedReader { readers: readers, cur_reader: r }
}
}
+#[deprecated(since = "1.0.0", reason = "use std::io::Chain instead")]
+#[unstable(feature = "old_io")]
impl<R: Reader, I: Iterator<Item=R>> Reader for ChainedReader<I, R> {
fn read(&mut self, buf: &mut [u8]) -> old_io::IoResult<uint> {
loop {
/// A `Reader` which forwards input from another `Reader`, passing it along to
/// a `Writer` as well. Similar to the `tee(1)` command.
#[derive(Debug)]
+#[deprecated(since = "1.0.0", reason = "use std::io::Tee instead")]
+#[unstable(feature = "old_io")]
pub struct TeeReader<R, W> {
reader: R,
writer: W,
}
+#[deprecated(since = "1.0.0", reason = "use std::io::Tee instead")]
+#[unstable(feature = "old_io")]
impl<R: Reader, W: Writer> TeeReader<R, W> {
/// Creates a new `TeeReader`
+ #[deprecated(since = "1.0.0", reason = "use std::io's tee method instead")]
+ #[unstable(feature = "old_io")]
pub fn new(r: R, w: W) -> TeeReader<R, W> {
TeeReader { reader: r, writer: w }
}
}
}
+#[deprecated(since = "1.0.0", reason = "use std::io::Tee instead")]
+#[unstable(feature = "old_io")]
impl<R: Reader, W: Writer> Reader for TeeReader<R, W> {
fn read(&mut self, buf: &mut [u8]) -> old_io::IoResult<uint> {
self.reader.read(buf).and_then(|len| {
}
/// Copies all data from a `Reader` to a `Writer`.
+#[deprecated(since = "1.0.0", reason = "use std::io's copy function instead")]
+#[unstable(feature = "old_io")]
pub fn copy<R: Reader, W: Writer>(r: &mut R, w: &mut W) -> old_io::IoResult<()> {
let mut buf = [0; super::DEFAULT_BUF_SIZE];
loop {
#[test]
fn test_iter_reader() {
- let mut r = IterReader::new(0u8..8);
+ let mut r = IterReader::new(0..8);
let mut buf = [0, 0, 0];
let len = r.read(&mut buf).unwrap();
assert_eq!(len, 3);
#[test]
fn iter_reader_zero_length() {
- let mut r = IterReader::new(0u8..8);
+ let mut r = IterReader::new(0..8);
let mut buf = [];
assert_eq!(Ok(0), r.read(&mut buf));
}
#[cfg(not(target_os="android"))]
#[test]
fn test_inherit_env() {
- use os;
+ use std::env;
if running_on_valgrind() { return; }
let result = env_cmd().output().unwrap();
let output = String::from_utf8(result.stdout).unwrap();
- let r = os::env();
- for &(ref k, ref v) in &r {
+ for (ref k, ref v) in env::vars() {
// don't check windows magical empty-named variables
assert!(k.is_empty() ||
output.contains(format!("{}={}", *k, *v).as_slice()),
let lengths = [0, 1, 2, 3, 4, 5, 6, 7,
80, 81, 82, 83, 84, 85, 86, 87];
for &n in &lengths {
- let mut v = repeat(0u8).take(n).collect::<Vec<_>>();
+ let mut v = repeat(0).take(n).collect::<Vec<_>>();
r.fill_bytes(&mut v);
// use this to get nicer error messages.
}
fn getrandom_next_u32() -> u32 {
- let mut buf: [u8; 4] = [0u8; 4];
+ let mut buf: [u8; 4] = [0; 4];
getrandom_fill_bytes(&mut buf);
unsafe { mem::transmute::<[u8; 4], u32>(buf) }
}
fn getrandom_next_u64() -> u64 {
- let mut buf: [u8; 8] = [0u8; 8];
+ let mut buf: [u8; 8] = [0; 8];
getrandom_fill_bytes(&mut buf);
unsafe { mem::transmute::<[u8; 8], u64>(buf) }
}
impl Rng for OsRng {
fn next_u32(&mut self) -> u32 {
- let mut v = [0u8; 4];
+ let mut v = [0; 4];
self.fill_bytes(&mut v);
unsafe { mem::transmute(v) }
}
fn next_u64(&mut self) -> u64 {
- let mut v = [0u8; 8];
+ let mut v = [0; 8];
self.fill_bytes(&mut v);
unsafe { mem::transmute(v) }
}
impl Rng for OsRng {
fn next_u32(&mut self) -> u32 {
- let mut v = [0u8; 4];
+ let mut v = [0; 4];
self.fill_bytes(&mut v);
unsafe { mem::transmute(v) }
}
fn next_u64(&mut self) -> u64 {
- let mut v = [0u8; 8];
+ let mut v = [0; 8];
self.fill_bytes(&mut v);
unsafe { mem::transmute(v) }
}
r.next_u32();
r.next_u64();
- let mut v = [0u8; 1000];
+ let mut v = [0; 1000];
r.fill_bytes(&mut v);
}
// as possible (XXX: is this a good test?)
let mut r = OsRng::new().unwrap();
thread::yield_now();
- let mut v = [0u8; 1000];
+ let mut v = [0; 1000];
for _ in 0..100 {
r.next_u32();
#[test]
fn test_reader_rng_u64() {
// transmute from the target to avoid endianness concerns.
- let v = vec![0u8, 0, 0, 0, 0, 0, 0, 1,
+ let v = vec![0, 0, 0, 0, 0, 0, 0, 1,
0 , 0, 0, 0, 0, 0, 0, 2,
0, 0, 0, 0, 0, 0, 0, 3];
let mut rng = ReaderRng::new(MemReader::new(v));
- assert_eq!(rng.next_u64(), 1_u64.to_be());
- assert_eq!(rng.next_u64(), 2_u64.to_be());
- assert_eq!(rng.next_u64(), 3_u64.to_be());
+ assert_eq!(rng.next_u64(), 1.to_be());
+ assert_eq!(rng.next_u64(), 2.to_be());
+ assert_eq!(rng.next_u64(), 3.to_be());
}
#[test]
fn test_reader_rng_u32() {
- let v = vec![0u8, 0, 0, 1, 0, 0, 0, 2, 0, 0, 0, 3];
+ let v = vec![0, 0, 0, 1, 0, 0, 0, 2, 0, 0, 0, 3];
let mut rng = ReaderRng::new(MemReader::new(v));
- assert_eq!(rng.next_u32(), 1_u32.to_be());
- assert_eq!(rng.next_u32(), 2_u32.to_be());
- assert_eq!(rng.next_u32(), 3_u32.to_be());
+ assert_eq!(rng.next_u32(), 1.to_be());
+ assert_eq!(rng.next_u32(), 2.to_be());
+ assert_eq!(rng.next_u32(), 3.to_be());
}
#[test]
fn test_reader_rng_fill_bytes() {
- let v = [1u8, 2, 3, 4, 5, 6, 7, 8];
- let mut w = [0u8; 8];
+ let v = [1, 2, 3, 4, 5, 6, 7, 8];
+ let mut w = [0; 8];
let mut rng = ReaderRng::new(MemReader::new(v.to_vec()));
rng.fill_bytes(&mut w);
#[should_fail]
fn test_reader_rng_insufficient_bytes() {
let mut rng = ReaderRng::new(MemReader::new(vec!()));
- let mut v = [0u8; 3];
+ let mut v = [0; 3];
rng.fill_bytes(&mut v);
}
}
}
// Convert the arguments into a stack-allocated string
- let mut msg = [0u8; 512];
+ let mut msg = [0; 512];
let mut w = BufWriter { buf: &mut msg, pos: 0 };
let _ = write!(&mut w, "{}", args);
let msg = str::from_utf8(&w.buf[..w.pos]).unwrap_or("aborted");
//! ```
#![allow(non_camel_case_types)]
+#![unstable(feature = "thread_local_internals")]
use prelude::v1::*;
/// KEY.set(1 as *mut u8);
/// }
/// ```
-#[stable(feature = "rust1", since = "1.0.0")]
pub struct StaticKey {
/// Inner static TLS key (internals), created with by `INIT_INNER` in this
/// module.
- #[stable(feature = "rust1", since = "1.0.0")]
pub inner: StaticKeyInner,
/// Destructor for the TLS value.
///
/// See `Key::new` for information about when the destructor runs and how
/// it runs.
- #[stable(feature = "rust1", since = "1.0.0")]
pub dtor: Option<unsafe extern fn(*mut u8)>,
}
/// Constant initialization value for static TLS keys.
///
/// This value specifies no destructor by default.
-#[stable(feature = "rust1", since = "1.0.0")]
pub const INIT: StaticKey = StaticKey {
inner: INIT_INNER,
dtor: None,
/// Constant initialization value for the inner part of static TLS keys.
///
/// This value allows specific configuration of the destructor for a TLS key.
-#[stable(feature = "rust1", since = "1.0.0")]
pub const INIT_INNER: StaticKeyInner = StaticKeyInner {
key: atomic::ATOMIC_USIZE_INIT,
};
if index == slice.len() { return true; }
match slice.bytes.get(index) {
None => false,
- Some(&b) => b < 128u8 || b >= 192u8,
+ Some(&b) => b < 128 || b >= 192,
}
}
return Some(tmp);
}
- let mut buf = [0u16; 2];
+ let mut buf = [0; 2];
self.code_points.next().map(|code_point| {
let n = encode_utf16_raw(code_point.value, &mut buf)
.unwrap_or(0);
fn as_raw_fd(&self) -> Fd;
}
+#[allow(deprecated)]
impl AsRawFd for old_io::fs::File {
fn as_raw_fd(&self) -> Fd {
self.as_inner().fd()
let mut writer = FileDesc::new(writer, true);
writer.write(b"test").ok().unwrap();
- let mut buf = [0u8; 4];
+ let mut buf = [0; 4];
match reader.read(&mut buf) {
Ok(4) => {
assert_eq!(buf[0], 't' as u8);
fn as_raw_handle(&self) -> Handle;
}
+#[allow(deprecated)]
impl AsRawHandle for old_io::fs::File {
fn as_raw_handle(&self) -> Handle {
self.as_inner().handle()
use libc::{pid_t, c_void};
use libc;
use mem;
-use old_io::fs::PathExtensions;
+#[allow(deprecated)] use old_io::fs::PathExtensions;
use old_io::process::{ProcessExit, ExitStatus};
use old_io::{IoResult, IoError};
use old_io;
assert!(e.is::<T>());
let any = e.downcast::<T>().ok().unwrap();
assert!(any.is::<u16>());
- assert_eq!(*any.downcast::<u16>().ok().unwrap(), 413u16);
+ assert_eq!(*any.downcast::<u16>().ok().unwrap(), 413);
}
Ok(()) => panic!()
}
// Sure wish we had macro hygiene, no?
#[doc(hidden)]
-#[stable(feature = "rust1", since = "1.0.0")]
+#[unstable(feature = "thread_local_internals")]
pub mod __impl {
pub use super::imp::Key as KeyInner;
pub use super::imp::destroy_value;
/// Declare a new thread local storage key of type `std::thread_local::Key`.
#[macro_export]
#[stable(feature = "rust1", since = "1.0.0")]
+#[allow_internal_unstable]
macro_rules! thread_local {
(static $name:ident: $t:ty = $init:expr) => (
static $name: ::std::thread_local::Key<$t> = {
#[macro_export]
#[doc(hidden)]
+#[allow_internal_unstable]
macro_rules! __thread_local_inner {
(static $name:ident: $t:ty = $init:expr) => (
#[cfg_attr(all(any(target_os = "macos", target_os = "linux"),
use ptr;
#[doc(hidden)]
- #[stable(since = "1.0.0", feature = "rust1")]
+ #[unstable(feature = "thread_local_internals")]
pub struct Key<T> {
// Place the inner bits in an `UnsafeCell` to currently get around the
// "only Sync statics" restriction. This allows any type to be placed in
//
// Note that all access requires `T: 'static` so it can't be a type with
// any borrowed pointers still.
- #[stable(since = "1.0.0", feature = "rust1")]
+ #[unstable(feature = "thread_local_internals")]
pub inner: UnsafeCell<T>,
// Metadata to keep track of the state of the destructor. Remember that
// these variables are thread-local, not global.
- #[stable(since = "1.0.0", feature = "rust1")]
+ #[unstable(feature = "thread_local_internals")]
pub dtor_registered: UnsafeCell<bool>, // should be Cell
- #[stable(since = "1.0.0", feature = "rust1")]
+ #[unstable(feature = "thread_local_internals")]
pub dtor_running: UnsafeCell<bool>, // should be Cell
}
}
#[doc(hidden)]
- #[stable(feature = "rust1", since = "1.0.0")]
+ #[unstable(feature = "thread_local_internals")]
pub unsafe extern fn destroy_value<T>(ptr: *mut u8) {
let ptr = ptr as *mut Key<T>;
// Right before we run the user destructor be sure to flag the
use sys_common::thread_local::StaticKey as OsStaticKey;
#[doc(hidden)]
- #[stable(since = "1.0.0", feature = "rust1")]
+ #[unstable(feature = "thread_local_internals")]
pub struct Key<T> {
// Statically allocated initialization expression, using an `UnsafeCell`
// for the same reasons as above.
- #[stable(since = "1.0.0", feature = "rust1")]
+ #[unstable(feature = "thread_local_internals")]
pub inner: UnsafeCell<T>,
// OS-TLS key that we'll use to key off.
- #[stable(since = "1.0.0", feature = "rust1")]
+ #[unstable(feature = "thread_local_internals")]
pub os: OsStaticKey,
}
}
#[doc(hidden)]
- #[stable(feature = "rust1", since = "1.0.0")]
+ #[unstable(feature = "thread_local_internals")]
pub unsafe extern fn destroy_value<T: 'static>(ptr: *mut u8) {
// The OS TLS ensures that this key contains a NULL value when this
// destructor starts to run. We set it back to a sentinel value of 1 to
/// This macro declares a `static` item on which methods are used to get and
/// set the value stored within.
#[macro_export]
+#[allow_internal_unstable]
macro_rules! scoped_thread_local {
(static $name:ident: $t:ty) => (
__scoped_thread_local_inner!(static $name: $t);
#[macro_export]
#[doc(hidden)]
+#[allow_internal_unstable]
macro_rules! __scoped_thread_local_inner {
(static $name:ident: $t:ty) => (
#[cfg_attr(not(any(windows,
impl Duration {
/// Makes a new `Duration` with given number of weeks.
- /// Equivalent to `Duration::seconds(weeks * 7 * 24 * 60 * 60), with overflow checks.
+ /// Equivalent to `Duration::seconds(weeks * 7 * 24 * 60 * 60)` with overflow checks.
/// Panics when the duration is out of bounds.
#[inline]
#[unstable(feature = "std_misc")]
//! assert!(b == c);
//!
//! let d : (u32, f32) = Default::default();
-//! assert_eq!(d, (0u32, 0.0f32));
+//! assert_eq!(d, (0, 0.0f32));
//! ```
#![doc(primitive = "tuple")]
use codemap::{Span, Spanned, DUMMY_SP, ExpnId};
use abi::Abi;
use ast_util;
+use ext::base;
+use ext::tt::macro_parser;
use owned_slice::OwnedSlice;
use parse::token::{InternedString, str_to_ident};
use parse::token;
+use parse::lexer;
use ptr::P;
use std::fmt;
TtSequence(span, _) => span,
}
}
+
+ /// Use this token tree as a matcher to parse given tts.
+ pub fn parse(cx: &base::ExtCtxt, mtch: &[TokenTree], tts: &[TokenTree])
+ -> macro_parser::NamedParseResult {
+ // `None` is because we're not interpolating
+ let arg_rdr = lexer::new_tt_reader_with_doc_flag(&cx.parse_sess().span_diagnostic,
+ None,
+ None,
+ tts.iter().cloned().collect(),
+ true);
+ macro_parser::parse(cx.parse_sess(), cx.cfg(), arg_rdr, mtch)
+ }
}
pub type Mac = Spanned<Mac_>;
pub imported_from: Option<Ident>,
pub export: bool,
pub use_locally: bool,
+ pub allow_internal_unstable: bool,
pub body: Vec<TokenTree>,
}
use arena::TypedArena;
use std::cell::RefCell;
use std::fmt;
-use std::old_io::IoResult;
+use std::io;
use std::iter::{self, repeat};
use std::mem;
use std::slice;
}
pub trait NodePrinter {
- fn print_node(&mut self, node: &Node) -> IoResult<()>;
+ fn print_node(&mut self, node: &Node) -> io::Result<()>;
}
impl<'a> NodePrinter for pprust::State<'a> {
- fn print_node(&mut self, node: &Node) -> IoResult<()> {
+ fn print_node(&mut self, node: &Node) -> io::Result<()> {
match *node {
NodeItem(a) => self.print_item(&*a),
NodeForeignItem(a) => self.print_foreign_item(&*a),
pub fn int_ty_max(t: IntTy) -> u64 {
match t {
- TyI8 => 0x80u64,
- TyI16 => 0x8000u64,
- TyIs(_) | TyI32 => 0x80000000u64, // actually ni about TyIs
- TyI64 => 0x8000000000000000u64
+ TyI8 => 0x80,
+ TyI16 => 0x8000,
+ TyIs(_) | TyI32 => 0x80000000, // actually ni about TyIs
+ TyI64 => 0x8000000000000000
}
}
pub fn uint_ty_max(t: UintTy) -> u64 {
match t {
- TyU8 => 0xffu64,
- TyU16 => 0xffffu64,
- TyUs(_) | TyU32 => 0xffffffffu64, // actually ni about TyUs
- TyU64 => 0xffffffffffffffffu64
+ TyU8 => 0xff,
+ TyU16 => 0xffff,
+ TyUs(_) | TyU32 => 0xffffffff, // actually ni about TyUs
+ TyU64 => 0xffffffffffffffff
}
}
use libc::c_uint;
use serialize::{Encodable, Decodable, Encoder, Decoder};
+
+// _____________________________________________________________________________
+// Pos, BytePos, CharPos
+//
+
pub trait Pos {
fn from_usize(n: usize) -> Self;
fn to_usize(&self) -> usize;
}
}
+impl Encodable for BytePos {
+ fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
+ s.emit_u32(self.0)
+ }
+}
+
+impl Decodable for BytePos {
+ fn decode<D: Decoder>(d: &mut D) -> Result<BytePos, D::Error> {
+ Ok(BytePos(try!{ d.read_u32() }))
+ }
+}
+
impl Pos for CharPos {
fn from_usize(n: usize) -> CharPos { CharPos(n) }
fn to_usize(&self) -> usize { let CharPos(n) = *self; n }
}
}
+// _____________________________________________________________________________
+// Span, Spanned
+//
+
/// Spans represent a region of code, used for error reporting. Positions in spans
/// are *absolute* positions from the beginning of the codemap, not positions
/// relative to FileMaps. Methods on the CodeMap can be used to relate spans back
impl Eq for Span {}
impl Encodable for Span {
- /* Note #1972 -- spans are encoded but not decoded */
fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
- s.emit_nil()
+ // Encode spans as a single u64 in order to cut down on tagging overhead
+        // added by the RBML metadata encoding. This should be solved differently
+ // altogether some time (FIXME #21482)
+ s.emit_u64( (self.lo.0 as u64) | ((self.hi.0 as u64) << 32) )
}
}
impl Decodable for Span {
- fn decode<D: Decoder>(_d: &mut D) -> Result<Span, D::Error> {
- Ok(DUMMY_SP)
+ fn decode<D: Decoder>(d: &mut D) -> Result<Span, D::Error> {
+ let lo_hi: u64 = try! { d.read_u64() };
+ let lo = BytePos(lo_hi as u32);
+ let hi = BytePos((lo_hi >> 32) as u32);
+ Ok(mk_sp(lo, hi))
}
}
}
}
+// _____________________________________________________________________________
+// Loc, LocWithOpt, FileMapAndLine, FileMapAndBytePos
+//
+
/// A source code location used for error reporting
pub struct Loc {
/// Information about the original source
pub struct FileMapAndLine { pub fm: Rc<FileMap>, pub line: usize }
pub struct FileMapAndBytePos { pub fm: Rc<FileMap>, pub pos: BytePos }
+
+// _____________________________________________________________________________
+// MacroFormat, NameAndSpan, ExpnInfo, ExpnId
+//
+
/// The syntax with which a macro was invoked.
#[derive(Clone, Copy, Hash, Debug)]
pub enum MacroFormat {
pub name: String,
/// The format with which the macro was invoked.
pub format: MacroFormat,
+ /// Whether the macro is allowed to use #[unstable]/feature-gated
+ /// features internally without forcing the whole crate to opt-in
+ /// to them.
+ pub allow_internal_unstable: bool,
/// The span of the macro definition itself. The macro may not
/// have a sensible definition span (e.g. something defined
/// completely inside libsyntax) in which case this is None.
}
}
+// _____________________________________________________________________________
+// FileMap, MultiByteChar, FileName, FileLines
+//
+
pub type FileName = String;
pub struct FileLines {
}
/// Identifies an offset of a multi-byte character in a FileMap
-#[derive(Copy)]
+#[derive(Copy, RustcEncodable, RustcDecodable, Eq, PartialEq)]
pub struct MultiByteChar {
/// The absolute offset of the character in the CodeMap
pub pos: BytePos,
/// e.g. `<anon>`
pub name: FileName,
/// The complete source code
- pub src: String,
+ pub src: Option<Rc<String>>,
/// The start position of this source in the CodeMap
pub start_pos: BytePos,
+ /// The end position of this source in the CodeMap
+ pub end_pos: BytePos,
/// Locations of lines beginnings in the source code
- pub lines: RefCell<Vec<BytePos> >,
+ pub lines: RefCell<Vec<BytePos>>,
/// Locations of multi-byte characters in the source code
- pub multibyte_chars: RefCell<Vec<MultiByteChar> >,
+ pub multibyte_chars: RefCell<Vec<MultiByteChar>>,
+}
+
+impl Encodable for FileMap {
+ fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
+ s.emit_struct("FileMap", 5, |s| {
+ try! { s.emit_struct_field("name", 0, |s| self.name.encode(s)) };
+ try! { s.emit_struct_field("start_pos", 1, |s| self.start_pos.encode(s)) };
+ try! { s.emit_struct_field("end_pos", 2, |s| self.end_pos.encode(s)) };
+ try! { s.emit_struct_field("lines", 3, |s| {
+ let lines = self.lines.borrow();
+ // store the length
+ try! { s.emit_u32(lines.len() as u32) };
+
+ if lines.len() > 0 {
+ // In order to preserve some space, we exploit the fact that
+ // the lines list is sorted and individual lines are
+ // probably not that long. Because of that we can store lines
+ // as a difference list, using as little space as possible
+ // for the differences.
+ let max_line_length = if lines.len() == 1 {
+ 0
+ } else {
+ lines.as_slice()
+ .windows(2)
+ .map(|w| w[1] - w[0])
+ .map(|bp| bp.to_usize())
+ .max()
+ .unwrap()
+ };
+
+ let bytes_per_diff: u8 = match max_line_length {
+ 0 ... 0xFF => 1,
+ 0x100 ... 0xFFFF => 2,
+ _ => 4
+ };
+
+ // Encode the number of bytes used per diff.
+ try! { bytes_per_diff.encode(s) };
+
+ // Encode the first element.
+ try! { lines[0].encode(s) };
+
+ let diff_iter = (&lines[..]).windows(2)
+ .map(|w| (w[1] - w[0]));
+
+ match bytes_per_diff {
+ 1 => for diff in diff_iter { try! { (diff.0 as u8).encode(s) } },
+ 2 => for diff in diff_iter { try! { (diff.0 as u16).encode(s) } },
+ 4 => for diff in diff_iter { try! { (diff.0 as u32).encode(s) } },
+ _ => unreachable!()
+ }
+ }
+
+ Ok(())
+ })
+ };
+ s.emit_struct_field("multibyte_chars", 4, |s| {
+ (*self.multibyte_chars.borrow()).encode(s)
+ })
+ })
+ }
+}
+
+impl Decodable for FileMap {
+ fn decode<D: Decoder>(d: &mut D) -> Result<FileMap, D::Error> {
+
+ d.read_struct("FileMap", 5, |d| {
+ let name: String = try! {
+ d.read_struct_field("name", 0, |d| Decodable::decode(d))
+ };
+ let start_pos: BytePos = try! {
+ d.read_struct_field("start_pos", 1, |d| Decodable::decode(d))
+ };
+ let end_pos: BytePos = try! {
+ d.read_struct_field("end_pos", 2, |d| Decodable::decode(d))
+ };
+ let lines: Vec<BytePos> = try! {
+ d.read_struct_field("lines", 3, |d| {
+ let num_lines: u32 = try! { Decodable::decode(d) };
+ let mut lines = Vec::with_capacity(num_lines as usize);
+
+ if num_lines > 0 {
+ // Read the number of bytes used per diff.
+ let bytes_per_diff: u8 = try! { Decodable::decode(d) };
+
+ // Read the first element.
+ let mut line_start: BytePos = try! { Decodable::decode(d) };
+ lines.push(line_start);
+
+ for _ in 1..num_lines {
+ let diff = match bytes_per_diff {
+ 1 => try! { d.read_u8() } as u32,
+ 2 => try! { d.read_u16() } as u32,
+ 4 => try! { d.read_u32() },
+ _ => unreachable!()
+ };
+
+ line_start = line_start + BytePos(diff);
+
+ lines.push(line_start);
+ }
+ }
+
+ Ok(lines)
+ })
+ };
+ let multibyte_chars: Vec<MultiByteChar> = try! {
+ d.read_struct_field("multibyte_chars", 4, |d| Decodable::decode(d))
+ };
+ Ok(FileMap {
+ name: name,
+ start_pos: start_pos,
+ end_pos: end_pos,
+ src: None,
+ lines: RefCell::new(lines),
+ multibyte_chars: RefCell::new(multibyte_chars)
+ })
+ })
+ }
}
impl FileMap {
/// get a line from the list of pre-computed line-beginnings
///
pub fn get_line(&self, line_number: usize) -> Option<String> {
- let lines = self.lines.borrow();
- lines.get(line_number).map(|&line| {
- let begin: BytePos = line - self.start_pos;
- let begin = begin.to_usize();
- let slice = &self.src[begin..];
- match slice.find('\n') {
- Some(e) => &slice[..e],
- None => slice
- }.to_string()
- })
+ match self.src {
+ Some(ref src) => {
+ let lines = self.lines.borrow();
+ lines.get(line_number).map(|&line| {
+ let begin: BytePos = line - self.start_pos;
+ let begin = begin.to_usize();
+ let slice = &src[begin..];
+ match slice.find('\n') {
+ Some(e) => &slice[..e],
+ None => slice
+ }.to_string()
+ })
+ }
+ None => None
+ }
}
pub fn record_multibyte_char(&self, pos: BytePos, bytes: usize) {
!(self.name.starts_with("<") &&
self.name.ends_with(">"))
}
+
+ pub fn is_imported(&self) -> bool {
+ self.src.is_none()
+ }
}
+
+// _____________________________________________________________________________
+// CodeMap
+//
+
pub struct CodeMap {
pub files: RefCell<Vec<Rc<FileMap>>>,
expansions: RefCell<Vec<ExpnInfo>>
let mut files = self.files.borrow_mut();
let start_pos = match files.last() {
None => 0,
- Some(last) => last.start_pos.to_usize() + last.src.len(),
+ Some(last) => last.end_pos.to_usize(),
};
// Remove utf-8 BOM if any.
src.push('\n');
}
+ let end_pos = start_pos + src.len();
+
let filemap = Rc::new(FileMap {
name: filename,
- src: src.to_string(),
+ src: Some(Rc::new(src)),
start_pos: Pos::from_usize(start_pos),
+ end_pos: Pos::from_usize(end_pos),
lines: RefCell::new(Vec::new()),
multibyte_chars: RefCell::new(Vec::new()),
});
filemap
}
+ /// Allocates a new FileMap representing a source file from an external
+ /// crate. The source code of such an "imported filemap" is not available,
+ /// but we still know enough to generate accurate debuginfo location
+ /// information for things inlined from other crates.
+ pub fn new_imported_filemap(&self,
+ filename: FileName,
+ source_len: usize,
+ file_local_lines: Vec<BytePos>,
+ file_local_multibyte_chars: Vec<MultiByteChar>)
+ -> Rc<FileMap> {
+ let mut files = self.files.borrow_mut();
+ let start_pos = match files.last() {
+ None => 0,
+ Some(last) => last.end_pos.to_usize(),
+ };
+
+ let end_pos = Pos::from_usize(start_pos + source_len);
+ let start_pos = Pos::from_usize(start_pos);
+
+ let lines = file_local_lines.map_in_place(|pos| pos + start_pos);
+ let multibyte_chars = file_local_multibyte_chars.map_in_place(|mbc| MultiByteChar {
+ pos: mbc.pos + start_pos,
+ bytes: mbc.bytes
+ });
+
+ let filemap = Rc::new(FileMap {
+ name: filename,
+ src: None,
+ start_pos: start_pos,
+ end_pos: end_pos,
+ lines: RefCell::new(lines),
+ multibyte_chars: RefCell::new(multibyte_chars),
+ });
+
+ files.push(filemap.clone());
+
+ filemap
+ }
+
pub fn mk_substr_filename(&self, sp: Span) -> String {
let pos = self.lookup_char_pos(sp.lo);
(format!("<{}:{}:{}>",
return Err(SpanSnippetError::IllFormedSpan(sp));
}
- let begin = self.lookup_byte_offset(sp.lo);
- let end = self.lookup_byte_offset(sp.hi);
+ let local_begin = self.lookup_byte_offset(sp.lo);
+ let local_end = self.lookup_byte_offset(sp.hi);
- if begin.fm.start_pos != end.fm.start_pos {
+ if local_begin.fm.start_pos != local_end.fm.start_pos {
return Err(SpanSnippetError::DistinctSources(DistinctSources {
- begin: (begin.fm.name.clone(),
- begin.fm.start_pos),
- end: (end.fm.name.clone(),
- end.fm.start_pos)
+ begin: (local_begin.fm.name.clone(),
+ local_begin.fm.start_pos),
+ end: (local_end.fm.name.clone(),
+ local_end.fm.start_pos)
}));
} else {
- let start = begin.pos.to_usize();
- let limit = end.pos.to_usize();
- if start > limit || limit > begin.fm.src.len() {
- return Err(SpanSnippetError::MalformedForCodemap(
- MalformedCodemapPositions {
- name: begin.fm.name.clone(),
- source_len: begin.fm.src.len(),
- begin_pos: begin.pos,
- end_pos: end.pos,
- }));
- }
+ match local_begin.fm.src {
+ Some(ref src) => {
+ let start_index = local_begin.pos.to_usize();
+ let end_index = local_end.pos.to_usize();
+ let source_len = (local_begin.fm.end_pos -
+ local_begin.fm.start_pos).to_usize();
+
+ if start_index > end_index || end_index > source_len {
+ return Err(SpanSnippetError::MalformedForCodemap(
+ MalformedCodemapPositions {
+ name: local_begin.fm.name.clone(),
+ source_len: source_len,
+ begin_pos: local_begin.pos,
+ end_pos: local_end.pos,
+ }));
+ }
- return Ok((&begin.fm.src[start..limit]).to_string())
+ return Ok((&src[start_index..end_index]).to_string())
+ }
+ None => {
+ return Err(SpanSnippetError::SourceNotAvailable {
+ filename: local_begin.fm.name.clone()
+ });
+ }
+ }
}
}
panic!("asking for {} which we don't know about", filename);
}
+ /// For a global BytePos, computes the local offset within the containing FileMap.
pub fn lookup_byte_offset(&self, bpos: BytePos) -> FileMapAndBytePos {
let idx = self.lookup_filemap_idx(bpos);
let fm = (*self.files.borrow())[idx].clone();
}
}
- /// Check if a span is "internal" to a macro. This means that it is entirely generated by a
- /// macro expansion and contains no code that was passed in as an argument.
- pub fn span_is_internal(&self, span: Span) -> bool {
- // first, check if the given expression was generated by a macro or not
- // we need to go back the expn_info tree to check only the arguments
- // of the initial macro call, not the nested ones.
- let mut is_internal = false;
- let mut expnid = span.expn_id;
- while self.with_expn_info(expnid, |expninfo| {
- match expninfo {
- Some(ref info) => {
- // save the parent expn_id for next loop iteration
- expnid = info.call_site.expn_id;
- if info.callee.name == "format_args" {
- // This is a hack because the format_args builtin calls unstable APIs.
- // I spent like 6 hours trying to solve this more generally but am stupid.
- is_internal = true;
- false
- } else if info.callee.span.is_none() {
- // it's a compiler built-in, we *really* don't want to mess with it
- // so we skip it, unless it was called by a regular macro, in which case
- // we will handle the caller macro next turn
- is_internal = true;
- true // continue looping
+ /// Check if a span is "internal" to a macro in which #[unstable]
+ /// items can be used (that is, a macro marked with
+ /// `#[allow_internal_unstable]`).
+ pub fn span_allows_unstable(&self, span: Span) -> bool {
+ debug!("span_allows_unstable(span = {:?})", span);
+ let mut allows_unstable = false;
+ let mut expn_id = span.expn_id;
+ loop {
+ let quit = self.with_expn_info(expn_id, |expninfo| {
+ debug!("span_allows_unstable: expninfo = {:?}", expninfo);
+ expninfo.map_or(/* hit the top level */ true, |info| {
+
+ let span_comes_from_this_expansion =
+ info.callee.span.map_or(span == info.call_site, |mac_span| {
+ mac_span.lo <= span.lo && span.hi < mac_span.hi
+ });
+
+ debug!("span_allows_unstable: from this expansion? {}, allows unstable? {}",
+ span_comes_from_this_expansion,
+ info.callee.allow_internal_unstable);
+ if span_comes_from_this_expansion {
+ allows_unstable = info.callee.allow_internal_unstable;
+ // we've found the right place, stop looking
+ true
} else {
- // was this expression from the current macro arguments ?
- is_internal = !( span.lo > info.call_site.lo &&
- span.hi < info.call_site.hi );
- true // continue looping
+ // not the right place, keep looking
+ expn_id = info.call_site.expn_id;
+ false
}
- },
- _ => false // stop looping
+ })
+ });
+ if quit {
+ break
}
- }) { /* empty while loop body */ }
- return is_internal;
+ }
+ debug!("span_allows_unstable? {}", allows_unstable);
+ allows_unstable
}
}
+// _____________________________________________________________________________
+// SpanSnippetError, DistinctSources, MalformedCodemapPositions
+//
+
#[derive(Clone, PartialEq, Eq, Debug)]
pub enum SpanSnippetError {
IllFormedSpan(Span),
DistinctSources(DistinctSources),
MalformedForCodemap(MalformedCodemapPositions),
+ SourceNotAvailable { filename: String }
}
#[derive(Clone, PartialEq, Eq, Debug)]
end_pos: BytePos
}
+
+// _____________________________________________________________________________
+// Tests
+//
+
#[cfg(test)]
mod test {
use super::*;
})
}
+#[macro_export]
+macro_rules! fileline_help {
+ ($session:expr, $span:expr, $($message:tt)*) => ({
+ ($session).fileline_help($span, &format!($($message)*))
+ })
+}
+
#[macro_export]
macro_rules! register_diagnostics {
($($code:tt),*) => (
name: "asm".to_string(),
format: codemap::MacroBang,
span: None,
+ allow_internal_unstable: false,
},
});
-// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
/// A normal, function-like syntax extension.
///
/// `bytes!` is a `NormalTT`.
- NormalTT(Box<TTMacroExpander + 'static>, Option<Span>),
+ ///
+ /// The `bool` dictates whether the contents of the macro can
+ /// directly use `#[unstable]` things (true == yes).
+ NormalTT(Box<TTMacroExpander + 'static>, Option<Span>, bool),
/// A function-like syntax extension that has an extra ident before
/// the block.
///
- IdentTT(Box<IdentMacroExpander + 'static>, Option<Span>),
+ IdentTT(Box<IdentMacroExpander + 'static>, Option<Span>, bool),
/// Represents `macro_rules!` itself.
MacroRulesTT,
-> SyntaxEnv {
// utility function to simplify creating NormalTT syntax extensions
fn builtin_normal_expander(f: MacroExpanderFn) -> SyntaxExtension {
- NormalTT(Box::new(f), None)
+ NormalTT(Box::new(f), None, false)
}
let mut syntax_expanders = SyntaxEnv::new();
syntax_expanders.insert(intern("macro_rules"), MacroRulesTT);
syntax_expanders.insert(intern("format_args"),
- builtin_normal_expander(
- ext::format::expand_format_args));
+ // format_args uses `unstable` things internally.
+ NormalTT(Box::new(ext::format::expand_format_args), None, true));
syntax_expanders.insert(intern("env"),
builtin_normal_expander(
ext::env::expand_env));
syntax_expanders.insert(intern("quote_stmt"),
builtin_normal_expander(
ext::quote::expand_quote_stmt));
+ syntax_expanders.insert(intern("quote_matcher"),
+ builtin_normal_expander(
+ ext::quote::expand_quote_matcher));
+ syntax_expanders.insert(intern("quote_attr"),
+ builtin_normal_expander(
+ ext::quote::expand_quote_attr));
}
syntax_expanders.insert(intern("line"),
self.print_backtrace();
self.parse_sess.span_diagnostic.span_help(sp, msg);
}
+ pub fn fileline_help(&self, sp: Span, msg: &str) {
+ self.print_backtrace();
+ self.parse_sess.span_diagnostic.fileline_help(sp, msg);
+ }
pub fn bug(&self, msg: &str) -> ! {
self.print_backtrace();
self.parse_sess.span_diagnostic.handler().bug(msg);
callee: codemap::NameAndSpan {
name: format!("derive({})", trait_name),
format: codemap::MacroAttribute,
- span: Some(self.span)
+ span: Some(self.span),
+ allow_internal_unstable: false,
}
});
to_set
use codemap;
use codemap::{Span, Spanned, ExpnInfo, NameAndSpan, MacroBang, MacroAttribute};
use ext::base::*;
-use feature_gate::{Features};
+use feature_gate::{self, Features};
use fold;
use fold::*;
use parse;
None
}
Some(rc) => match *rc {
- NormalTT(ref expandfun, exp_span) => {
+ NormalTT(ref expandfun, exp_span, allow_internal_unstable) => {
fld.cx.bt_push(ExpnInfo {
call_site: span,
callee: NameAndSpan {
name: extnamestr.to_string(),
format: MacroBang,
span: exp_span,
+ allow_internal_unstable: allow_internal_unstable,
},
});
let fm = fresh_mark();
name: mname.to_string(),
format: MacroAttribute,
span: None,
+ // attributes can do whatever they like,
+ // for now
+ allow_internal_unstable: true,
}
});
it = mac.expand(fld.cx, attr.span, &*attr.node.value, it);
fld.cx.span_warn(attr.span, "macro_escape is a deprecated synonym for macro_use");
is_use = true;
if let ast::AttrInner = attr.node.style {
- fld.cx.span_help(attr.span, "consider an outer attribute, \
+ fld.cx.fileline_help(attr.span, "consider an outer attribute, \
#[macro_use] mod ...");
}
};
}
Some(rc) => match *rc {
- NormalTT(ref expander, span) => {
+ NormalTT(ref expander, span, allow_internal_unstable) => {
if it.ident.name != parse::token::special_idents::invalid.name {
fld.cx
.span_err(path_span,
callee: NameAndSpan {
name: extnamestr.to_string(),
format: MacroBang,
- span: span
+ span: span,
+ allow_internal_unstable: allow_internal_unstable,
}
});
// mark before expansion:
let marked_before = mark_tts(&tts[..], fm);
expander.expand(fld.cx, it.span, &marked_before[..])
}
- IdentTT(ref expander, span) => {
+ IdentTT(ref expander, span, allow_internal_unstable) => {
if it.ident.name == parse::token::special_idents::invalid.name {
fld.cx.span_err(path_span,
&format!("macro {}! expects an ident argument",
callee: NameAndSpan {
name: extnamestr.to_string(),
format: MacroBang,
- span: span
+ span: span,
+ allow_internal_unstable: allow_internal_unstable,
}
});
// mark before expansion:
);
return SmallVector::zero();
}
+
fld.cx.bt_push(ExpnInfo {
call_site: it.span,
callee: NameAndSpan {
name: extnamestr.to_string(),
format: MacroBang,
span: None,
+ // `macro_rules!` doesn't directly allow
+ // unstable (this is orthogonal to whether
+ // the macro it creates allows it)
+ allow_internal_unstable: false,
}
});
// DON'T mark before expansion.
+ let allow_internal_unstable = attr::contains_name(&it.attrs,
+ "allow_internal_unstable");
+
+ // ensure any #[allow_internal_unstable]s are
+ // detected (including nested macro definitions
+ // etc.)
+ if allow_internal_unstable && !fld.cx.ecfg.enable_allow_internal_unstable() {
+ feature_gate::emit_feature_err(
+ &fld.cx.parse_sess.span_diagnostic,
+ "allow_internal_unstable",
+ it.span,
+ feature_gate::EXPLAIN_ALLOW_INTERNAL_UNSTABLE)
+ }
+
let def = ast::MacroDef {
ident: it.ident,
attrs: it.attrs.clone(),
imported_from: None,
export: attr::contains_name(&it.attrs, "macro_export"),
use_locally: true,
+ allow_internal_unstable: allow_internal_unstable,
body: tts,
};
fld.cx.insert_macro(def);
}
Some(rc) => match *rc {
- NormalTT(ref expander, tt_span) => {
+ NormalTT(ref expander, tt_span, allow_internal_unstable) => {
fld.cx.bt_push(ExpnInfo {
call_site: span,
callee: NameAndSpan {
name: extnamestr.to_string(),
format: MacroBang,
- span: tt_span
+ span: tt_span,
+ allow_internal_unstable: allow_internal_unstable,
}
});
callee: NameAndSpan {
name: mname.to_string(),
format: MacroAttribute,
- span: None
+ span: None,
+ // attributes can do whatever they like,
+ // for now.
+ allow_internal_unstable: true,
}
});
name: mname.to_string(),
format: MacroAttribute,
span: None,
+ // attributes can do whatever they like,
+ // for now
+ allow_internal_unstable: true,
}
});
it = mac.expand(fld.cx, attr.span, &*attr.node.value, it);
_ => false,
}
}
+
+ pub fn enable_allow_internal_unstable(&self) -> bool {
+ match self.features {
+ Some(&Features { allow_internal_unstable: true, .. }) => true,
+ _ => false
+ }
+ }
}
pub fn expand_crate<'feat>(parse_sess: &parse::ParseSess,
-// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
tts: &[ast::TokenTree])
-> Box<base::MacResult+'cx> {
let (cx_expr, expr) = expand_tts(cx, sp, tts);
- let expanded = expand_wrapper(cx, sp, cx_expr, expr);
+ let expanded = expand_wrapper(cx, sp, cx_expr, expr, &[&["syntax", "ext", "quote", "rt"]]);
base::MacEager::expr(expanded)
}
base::MacEager::expr(expanded)
}
+pub fn expand_quote_attr(cx: &mut ExtCtxt,
+ sp: Span,
+ tts: &[ast::TokenTree])
+ -> Box<base::MacResult+'static> {
+ let expanded = expand_parse_call(cx, sp, "parse_attribute",
+ vec!(cx.expr_bool(sp, true)), tts);
+
+ base::MacEager::expr(expanded)
+}
+
+pub fn expand_quote_matcher(cx: &mut ExtCtxt,
+ sp: Span,
+ tts: &[ast::TokenTree])
+ -> Box<base::MacResult+'static> {
+ let (cx_expr, tts) = parse_arguments_to_quote(cx, tts);
+ let mut vector = mk_stmts_let(cx, sp);
+ vector.extend(statements_mk_tts(cx, &tts[..], true).into_iter());
+ let block = cx.expr_block(
+ cx.block_all(sp,
+ vector,
+ Some(cx.expr_ident(sp, id_ext("tt")))));
+
+ let expanded = expand_wrapper(cx, sp, cx_expr, block, &[&["syntax", "ext", "quote", "rt"]]);
+ base::MacEager::expr(expanded)
+}
+
fn ids_ext(strs: Vec<String> ) -> Vec<ast::Ident> {
strs.iter().map(|str| str_to_ident(&(*str))).collect()
}
}
#[allow(non_upper_case_globals)]
-fn mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
+fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
macro_rules! mk_lit {
($name: expr, $suffix: expr, $($args: expr),*) => {{
let inner = cx.expr_call(sp, mk_token_path(cx, sp, $name), vec![$($args),*]);
vec!(mk_name(cx, sp, ident.ident())));
}
+ token::MatchNt(name, kind, namep, kindp) => {
+ return cx.expr_call(sp,
+ mk_token_path(cx, sp, "MatchNt"),
+ vec!(mk_ident(cx, sp, name),
+ mk_ident(cx, sp, kind),
+ match namep {
+ ModName => mk_token_path(cx, sp, "ModName"),
+ Plain => mk_token_path(cx, sp, "Plain"),
+ },
+ match kindp {
+ ModName => mk_token_path(cx, sp, "ModName"),
+ Plain => mk_token_path(cx, sp, "Plain"),
+ }));
+ }
+
token::Interpolated(_) => panic!("quote! with interpolated token"),
_ => ()
token::FatArrow => "FatArrow",
token::Pound => "Pound",
token::Dollar => "Dollar",
+ token::Question => "Question",
token::Underscore => "Underscore",
token::Eof => "Eof",
- _ => panic!(),
+ _ => panic!("unhandled token in quote!"),
};
mk_token_path(cx, sp, name)
}
-fn mk_tt(cx: &ExtCtxt, tt: &ast::TokenTree) -> Vec<P<ast::Stmt>> {
+fn statements_mk_tt(cx: &ExtCtxt, tt: &ast::TokenTree, matcher: bool) -> Vec<P<ast::Stmt>> {
match *tt {
ast::TtToken(sp, SubstNt(ident, _)) => {
// tt.extend($ident.to_tokens(ext_cx).into_iter())
vec!(cx.stmt_expr(e_push))
}
- ref tt @ ast::TtToken(_, MatchNt(..)) => {
+ ref tt @ ast::TtToken(_, MatchNt(..)) if !matcher => {
let mut seq = vec![];
for i in 0..tt.len() {
seq.push(tt.get_tt(i));
}
- mk_tts(cx, &seq[..])
+ statements_mk_tts(cx, &seq[..], matcher)
}
ast::TtToken(sp, ref tok) => {
let e_sp = cx.expr_ident(sp, id_ext("_sp"));
let e_tok = cx.expr_call(sp,
mk_ast_path(cx, sp, "TtToken"),
- vec!(e_sp, mk_token(cx, sp, tok)));
+ vec!(e_sp, expr_mk_token(cx, sp, tok)));
let e_push =
cx.expr_method_call(sp,
cx.expr_ident(sp, id_ext("tt")),
vec!(cx.stmt_expr(e_push))
},
ast::TtDelimited(_, ref delimed) => {
- mk_tt(cx, &delimed.open_tt()).into_iter()
- .chain(delimed.tts.iter().flat_map(|tt| mk_tt(cx, tt).into_iter()))
- .chain(mk_tt(cx, &delimed.close_tt()).into_iter())
+ statements_mk_tt(cx, &delimed.open_tt(), matcher).into_iter()
+ .chain(delimed.tts.iter()
+ .flat_map(|tt| statements_mk_tt(cx, tt, matcher).into_iter()))
+ .chain(statements_mk_tt(cx, &delimed.close_tt(), matcher).into_iter())
.collect()
},
- ast::TtSequence(..) => panic!("TtSequence in quote!"),
- }
-}
+ ast::TtSequence(sp, ref seq) => {
+ if !matcher {
+ panic!("TtSequence in quote!");
+ }
-fn mk_tts(cx: &ExtCtxt, tts: &[ast::TokenTree]) -> Vec<P<ast::Stmt>> {
- let mut ss = Vec::new();
- for tt in tts {
- ss.extend(mk_tt(cx, tt).into_iter());
+ let e_sp = cx.expr_ident(sp, id_ext("_sp"));
+
+ let stmt_let_tt = cx.stmt_let(sp, true, id_ext("tt"), cx.expr_vec_ng(sp));
+ let mut tts_stmts = vec![stmt_let_tt];
+ tts_stmts.extend(statements_mk_tts(cx, &seq.tts[..], matcher).into_iter());
+ let e_tts = cx.expr_block(cx.block(sp, tts_stmts,
+ Some(cx.expr_ident(sp, id_ext("tt")))));
+ let e_separator = match seq.separator {
+ Some(ref sep) => cx.expr_some(sp, expr_mk_token(cx, sp, sep)),
+ None => cx.expr_none(sp),
+ };
+ let e_op = match seq.op {
+ ast::ZeroOrMore => mk_ast_path(cx, sp, "ZeroOrMore"),
+ ast::OneOrMore => mk_ast_path(cx, sp, "OneOrMore"),
+ };
+ let fields = vec![cx.field_imm(sp, id_ext("tts"), e_tts),
+ cx.field_imm(sp, id_ext("separator"), e_separator),
+ cx.field_imm(sp, id_ext("op"), e_op),
+ cx.field_imm(sp, id_ext("num_captures"),
+ cx.expr_usize(sp, seq.num_captures))];
+ let seq_path = vec![id_ext("syntax"), id_ext("ast"), id_ext("SequenceRepetition")];
+ let e_seq_struct = cx.expr_struct(sp, cx.path_global(sp, seq_path), fields);
+ let e_rc_new = cx.expr_call_global(sp, vec![id_ext("std"),
+ id_ext("rc"),
+ id_ext("Rc"),
+ id_ext("new")],
+ vec![e_seq_struct]);
+ let e_tok = cx.expr_call(sp,
+ mk_ast_path(cx, sp, "TtSequence"),
+ vec!(e_sp, e_rc_new));
+ let e_push =
+ cx.expr_method_call(sp,
+ cx.expr_ident(sp, id_ext("tt")),
+ id_ext("push"),
+ vec!(e_tok));
+ vec!(cx.stmt_expr(e_push))
+ }
}
- ss
}
-fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[ast::TokenTree])
- -> (P<ast::Expr>, P<ast::Expr>) {
+fn parse_arguments_to_quote(cx: &ExtCtxt, tts: &[ast::TokenTree])
+ -> (P<ast::Expr>, Vec<ast::TokenTree>) {
// NB: It appears that the main parser loses its mind if we consider
- // $foo as a TtNonterminal during the main parse, so we have to re-parse
+ // $foo as a SubstNt during the main parse, so we have to re-parse
// under quote_depth > 0. This is silly and should go away; the _guess_ is
// it has to do with transition away from supporting old-style macros, so
// try removing it when enough of them are gone.
let tts = p.parse_all_token_trees();
p.abort_if_errors();
+ (cx_expr, tts)
+}
+
+fn mk_stmts_let(cx: &ExtCtxt, sp: Span) -> Vec<P<ast::Stmt>> {
// We also bind a single value, sp, to ext_cx.call_site()
//
// This causes every span in a token-tree quote to be attributed to the
let stmt_let_tt = cx.stmt_let(sp, true, id_ext("tt"), cx.expr_vec_ng(sp));
- let mut vector = vec!(stmt_let_sp, stmt_let_tt);
- vector.extend(mk_tts(cx, &tts[..]).into_iter());
+ vec!(stmt_let_sp, stmt_let_tt)
+}
+
+fn statements_mk_tts(cx: &ExtCtxt, tts: &[ast::TokenTree], matcher: bool) -> Vec<P<ast::Stmt>> {
+ let mut ss = Vec::new();
+ for tt in tts {
+ ss.extend(statements_mk_tt(cx, tt, matcher).into_iter());
+ }
+ ss
+}
+
+fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[ast::TokenTree])
+ -> (P<ast::Expr>, P<ast::Expr>) {
+ let (cx_expr, tts) = parse_arguments_to_quote(cx, tts);
+
+ let mut vector = mk_stmts_let(cx, sp);
+ vector.extend(statements_mk_tts(cx, &tts[..], false).into_iter());
let block = cx.expr_block(
cx.block_all(sp,
vector,
fn expand_wrapper(cx: &ExtCtxt,
sp: Span,
cx_expr: P<ast::Expr>,
- expr: P<ast::Expr>) -> P<ast::Expr> {
+ expr: P<ast::Expr>,
+ imports: &[&[&str]]) -> P<ast::Expr> {
// Explicitly borrow to avoid moving from the invoker (#16992)
let cx_expr_borrow = cx.expr_addr_of(sp, cx.expr_deref(sp, cx_expr));
let stmt_let_ext_cx = cx.stmt_let(sp, false, id_ext("ext_cx"), cx_expr_borrow);
- let stmts = [
- &["syntax", "ext", "quote", "rt"],
- ].iter().map(|path| {
+ let stmts = imports.iter().map(|path| {
+ // make item: `use ...;`
let path = path.iter().map(|s| s.to_string()).collect();
cx.stmt_item(sp, cx.item_use_glob(sp, ast::Inherited, ids_ext(path)))
}).chain(Some(stmt_let_ext_cx).into_iter()).collect();
let expr = cx.expr_method_call(sp, new_parser_call, id_ext(parse_method),
arg_exprs);
- expand_wrapper(cx, sp, cx_expr, expr)
+ if parse_method == "parse_attribute" {
+ expand_wrapper(cx, sp, cx_expr, expr, &[&["syntax", "ext", "quote", "rt"],
+ &["syntax", "parse", "attr"]])
+ } else {
+ expand_wrapper(cx, sp, cx_expr, expr, &[&["syntax", "ext", "quote", "rt"]])
+ }
}
use ptr::P;
use util::small_vector::SmallVector;
-use std::old_io::File;
+use std::fs::File;
+use std::io::prelude::*;
+use std::path::{Path, PathBuf};
use std::rc::Rc;
// These macros all relate to the file system; they either return
cx.cfg(),
&res_rel_file(cx,
sp,
- &Path::new(file)),
+ Path::new(&file)),
true,
None,
sp);
Some(f) => f,
None => return DummyResult::expr(sp)
};
- let file = res_rel_file(cx, sp, &Path::new(file));
- let bytes = match File::open(&file).read_to_end() {
+ let file = res_rel_file(cx, sp, Path::new(&file));
+ let mut bytes = Vec::new();
+ match File::open(&file).and_then(|mut f| f.read_to_end(&mut bytes)) {
+ Ok(..) => {}
Err(e) => {
cx.span_err(sp,
&format!("couldn't read {}: {}",
e));
return DummyResult::expr(sp);
}
- Ok(bytes) => bytes,
};
match String::from_utf8(bytes) {
Ok(src) => {
Some(f) => f,
None => return DummyResult::expr(sp)
};
- let file = res_rel_file(cx, sp, &Path::new(file));
- match File::open(&file).read_to_end() {
+ let file = res_rel_file(cx, sp, Path::new(&file));
+ let mut bytes = Vec::new();
+ match File::open(&file).and_then(|mut f| f.read_to_end(&mut bytes)) {
Err(e) => {
cx.span_err(sp,
&format!("couldn't read {}: {}", file.display(), e));
return DummyResult::expr(sp);
}
- Ok(bytes) => {
- let bytes = bytes.iter().cloned().collect();
+ Ok(..) => {
base::MacEager::expr(cx.expr_lit(sp, ast::LitBinary(Rc::new(bytes))))
}
}
// resolve a file-system path to an absolute file-system path (if it
// isn't already)
-fn res_rel_file(cx: &mut ExtCtxt, sp: codemap::Span, arg: &Path) -> Path {
+fn res_rel_file(cx: &mut ExtCtxt, sp: codemap::Span, arg: &Path) -> PathBuf {
// NB: relative paths are resolved relative to the compilation unit
if !arg.is_absolute() {
- let mut cu = Path::new(cx.codemap().span_to_filename(sp));
- cu.pop();
+ let mut cu = PathBuf::new(&cx.codemap().span_to_filename(sp));
+ if cu.parent().is_some() {
+ cu.pop();
+ } else {
+ cu = PathBuf::new("");
+ }
cu.push(arg);
cu
} else {
- arg.clone()
+ arg.to_path_buf()
}
}
ret_val
}
-pub enum ParseResult {
- Success(HashMap<Ident, Rc<NamedMatch>>),
+pub enum ParseResult<T> {
+ Success(T),
Failure(codemap::Span, String),
Error(codemap::Span, String)
}
+pub type NamedParseResult = ParseResult<HashMap<Ident, Rc<NamedMatch>>>;
+pub type PositionalParseResult = ParseResult<Vec<Rc<NamedMatch>>>;
+
pub fn parse_or_else(sess: &ParseSess,
cfg: ast::CrateConfig,
rdr: TtReader,
cfg: ast::CrateConfig,
mut rdr: TtReader,
ms: &[TokenTree])
- -> ParseResult {
+ -> NamedParseResult {
let mut cur_eis = Vec::new();
cur_eis.push(initial_matcher_pos(Rc::new(ms.iter()
.cloned()
use ext::tt::macro_parser::{Success, Error, Failure};
use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal};
use ext::tt::macro_parser::{parse, parse_or_else};
-use parse::lexer::{new_tt_reader, new_tt_reader_with_doc_flag};
+use parse::lexer::new_tt_reader;
use parse::parser::Parser;
use parse::attr::ParserAttr;
use parse::token::{self, special_idents, gensym_ident, NtTT, Token};
TtDelimited(_, ref delim) => &delim.tts[..],
_ => cx.span_fatal(sp, "malformed macro lhs")
};
- // `None` is because we're not interpolating
- let arg_rdr = new_tt_reader_with_doc_flag(&cx.parse_sess().span_diagnostic,
- None,
- None,
- arg.iter()
- .cloned()
- .collect(),
- true);
- match parse(cx.parse_sess(), cx.cfg(), arg_rdr, lhs_tt) {
+
+ match TokenTree::parse(cx, lhs_tt, arg) {
Success(named_matches) => {
let rhs = match *rhses[i] {
// okay, what's your transcriber?
rhses: rhses,
};
- NormalTT(exp, Some(def.span))
+ NormalTT(exp, Some(def.span), def.allow_internal_unstable)
}
fn check_lhs_nt_follows(cx: &mut ExtCtxt, lhs: &NamedMatch, sp: Span) {
// Allows the use of `static_assert`
("static_assert", "1.0.0", Active),
+
+ // Allows the use of #[allow_internal_unstable]. This is an
+ // attribute on macro_rules! and can't use the attribute handling
+ // below (it has to be checked before expansion, which may make
+ // the macros disappear).
+ ("allow_internal_unstable", "1.0.0", Active),
];
// (changing above list without updating src/doc/reference.md makes @cmr sad)
("recursion_limit", CrateLevel),
];
-#[derive(PartialEq, Copy)]
+#[derive(PartialEq, Copy, Debug)]
pub enum AttributeType {
/// Normal, builtin attribute that is consumed
/// by the compiler before the unused_attribute check
pub allow_log_syntax: bool,
pub allow_concat_idents: bool,
pub allow_trace_macros: bool,
+ pub allow_internal_unstable: bool,
pub old_orphan_check: bool,
pub simd_ffi: bool,
pub unmarked_api: bool,
allow_log_syntax: false,
allow_concat_idents: false,
allow_trace_macros: false,
+ allow_internal_unstable: false,
old_orphan_check: false,
simd_ffi: false,
unmarked_api: false,
features: Vec<&'static str>,
span_handler: &'a SpanHandler,
cm: &'a CodeMap,
+ do_warnings: bool,
}
impl<'a> Context<'a> {
fn gate_feature(&self, feature: &str, span: Span, explain: &str) {
- if !self.has_feature(feature) {
+ let has_feature = self.has_feature(feature);
+ debug!("gate_feature(feature = {:?}, span = {:?}); has? {}", feature, span, has_feature);
+ if !has_feature {
emit_feature_err(self.span_handler, feature, span, explain);
}
}
fn warn_feature(&self, feature: &str, span: Span, explain: &str) {
- if !self.has_feature(feature) {
+ if !self.has_feature(feature) && self.do_warnings {
emit_feature_warn(self.span_handler, feature, span, explain);
}
}
pub fn emit_feature_err(diag: &SpanHandler, feature: &str, span: Span, explain: &str) {
diag.span_err(span, explain);
- diag.span_help(span, &format!("add #![feature({})] to the \
+ diag.fileline_help(span, &format!("add #![feature({})] to the \
crate attributes to enable",
feature));
}
pub fn emit_feature_warn(diag: &SpanHandler, feature: &str, span: Span, explain: &str) {
diag.span_warn(span, explain);
if diag.handler.can_emit_warnings {
- diag.span_help(span, &format!("add #![feature({})] to the \
+ diag.fileline_help(span, &format!("add #![feature({})] to the \
crate attributes to silence this warning",
feature));
}
pub const EXPLAIN_TRACE_MACROS: &'static str =
"`trace_macros` is not stable enough for use and is subject to change";
+pub const EXPLAIN_ALLOW_INTERNAL_UNSTABLE: &'static str =
+ "allow_internal_unstable side-steps feature gating and stability checks";
struct MacroVisitor<'a> {
context: &'a Context<'a>
self.context.gate_feature("concat_idents", path.span, EXPLAIN_CONCAT_IDENTS);
}
}
+
+ fn visit_attribute(&mut self, attr: &'v ast::Attribute) {
+ if attr.name() == "allow_internal_unstable" {
+ self.context.gate_feature("allow_internal_unstable", attr.span,
+ EXPLAIN_ALLOW_INTERNAL_UNSTABLE)
+ }
+ }
}
struct PostExpansionVisitor<'a> {
impl<'a> PostExpansionVisitor<'a> {
fn gate_feature(&self, feature: &str, span: Span, explain: &str) {
- if !self.context.cm.span_is_internal(span) {
+ if !self.context.cm.span_allows_unstable(span) {
self.context.gate_feature(feature, span, explain)
}
}
}
fn visit_attribute(&mut self, attr: &ast::Attribute) {
+ debug!("visit_attribute(attr = {:?})", attr);
let name = &*attr.name();
for &(n, ty) in KNOWN_ATTRIBUTES {
if n == name {
if let Gated(gate, desc) = ty {
self.gate_feature(gate, attr.span, desc);
}
+ debug!("visit_attribute: {:?} is known, {:?}", name, ty);
return;
}
}
}
fn check_crate_inner<F>(cm: &CodeMap, span_handler: &SpanHandler, krate: &ast::Crate,
+ do_warnings: bool,
check: F)
-> Features
where F: FnOnce(&mut Context, &ast::Crate)
let mut cx = Context {
features: Vec::new(),
span_handler: span_handler,
+ do_warnings: do_warnings,
cm: cm,
};
allow_log_syntax: cx.has_feature("log_syntax"),
allow_concat_idents: cx.has_feature("concat_idents"),
allow_trace_macros: cx.has_feature("trace_macros"),
+ allow_internal_unstable: cx.has_feature("allow_internal_unstable"),
old_orphan_check: cx.has_feature("old_orphan_check"),
simd_ffi: cx.has_feature("simd_ffi"),
unmarked_api: cx.has_feature("unmarked_api"),
pub fn check_crate_macros(cm: &CodeMap, span_handler: &SpanHandler, krate: &ast::Crate)
-> Features {
- check_crate_inner(cm, span_handler, krate,
+ check_crate_inner(cm, span_handler, krate, true,
|ctx, krate| visit::walk_crate(&mut MacroVisitor { context: ctx }, krate))
}
-pub fn check_crate(cm: &CodeMap, span_handler: &SpanHandler, krate: &ast::Crate)
--> Features {
- check_crate_inner(cm, span_handler, krate,
+pub fn check_crate(cm: &CodeMap, span_handler: &SpanHandler, krate: &ast::Crate,
+ do_warnings: bool) -> Features
+{
+ check_crate_inner(cm, span_handler, krate, do_warnings,
|ctx, krate| visit::walk_crate(&mut PostExpansionVisitor { context: ctx },
krate))
}
#[cfg(test)]
mod test {
- use std::old_io;
+ use std::io;
use ast;
use util::parser_testing::{string_to_crate, matches_codepattern};
use parse::token;
// this version doesn't care about getting comments or docstrings in.
fn fake_print_crate(s: &mut pprust::State,
- krate: &ast::Crate) -> old_io::IoResult<()> {
+ krate: &ast::Crate) -> io::Result<()> {
s.print_mod(&krate.module, &krate.attrs)
}
#![feature(staged_api)]
#![feature(std_misc)]
#![feature(unicode)]
+#![feature(path)]
+#![feature(fs)]
+#![feature(io)]
extern crate arena;
extern crate fmt_macros;
self.span_err(span,
"an inner attribute is not permitted in \
this context");
- self.span_help(span,
+ self.fileline_help(span,
"place inner attribute at the top of the module or block");
}
ast::AttrInner
use parse::lexer;
use print::pprust;
-use std::old_io;
+use std::io::Read;
use std::str;
-use std::string::String;
use std::usize;
#[derive(Clone, Copy, PartialEq)]
// probably not a good thing.
pub fn gather_comments_and_literals(span_diagnostic: &diagnostic::SpanHandler,
path: String,
- srdr: &mut old_io::Reader)
+ srdr: &mut Read)
-> (Vec<Comment>, Vec<Literal>) {
- let src = srdr.read_to_end().unwrap();
+ let mut src = Vec::new();
+ srdr.read_to_end(&mut src).unwrap();
let src = String::from_utf8(src).unwrap();
let cm = CodeMap::new();
let filemap = cm.new_filemap(path, src);
// are revised to go directly to token-trees.
/// Is \x00<name>,<ctxt>\x00 interpreted as an encoded ast::Ident?
read_embedded_ident: bool,
+
+ // cache a direct reference to the source text, so that we don't have to
+ // retrieve it via `self.filemap.src.as_ref().unwrap()` all the time.
+ source_text: Rc<String>
}
impl<'a> Reader for StringReader<'a> {
impl<'a> StringReader<'a> {
/// For comments.rs, which hackily pokes into pos and curr
pub fn new_raw<'b>(span_diagnostic: &'b SpanHandler,
- filemap: Rc<codemap::FileMap>) -> StringReader<'b> {
+ filemap: Rc<codemap::FileMap>) -> StringReader<'b> {
+ if filemap.src.is_none() {
+ span_diagnostic.handler.bug(&format!("Cannot lex filemap without source: {}",
+ filemap.name)[..]);
+ }
+
+ let source_text = (*filemap.src.as_ref().unwrap()).clone();
+
let mut sr = StringReader {
span_diagnostic: span_diagnostic,
pos: filemap.start_pos,
peek_tok: token::Eof,
peek_span: codemap::DUMMY_SP,
read_embedded_ident: false,
+ source_text: source_text
};
sr.bump();
sr
m.push_str(": ");
let from = self.byte_offset(from_pos).to_usize();
let to = self.byte_offset(to_pos).to_usize();
- m.push_str(&self.filemap.src[from..to]);
+ m.push_str(&self.source_text[from..to]);
self.fatal_span_(from_pos, to_pos, &m[..]);
}
fn with_str_from_to<T, F>(&self, start: BytePos, end: BytePos, f: F) -> T where
F: FnOnce(&str) -> T,
{
- f(&self.filemap.src[
- self.byte_offset(start).to_usize()..
- self.byte_offset(end).to_usize()])
+ f(&self.source_text[self.byte_offset(start).to_usize()..
+ self.byte_offset(end).to_usize()])
}
/// Converts CRLF to LF in the given string, raising an error on bare CR.
pub fn bump(&mut self) {
self.last_pos = self.pos;
let current_byte_offset = self.byte_offset(self.pos).to_usize();
- if current_byte_offset < self.filemap.src.len() {
+ if current_byte_offset < self.source_text.len() {
assert!(self.curr.is_some());
let last_char = self.curr.unwrap();
- let next = self.filemap
- .src
- .char_range_at(current_byte_offset);
+ let next = self.source_text.char_range_at(current_byte_offset);
let byte_offset_diff = next.next - current_byte_offset;
self.pos = self.pos + Pos::from_usize(byte_offset_diff);
self.curr = Some(next.ch);
pub fn nextch(&self) -> Option<char> {
let offset = self.byte_offset(self.pos).to_usize();
- if offset < self.filemap.src.len() {
- Some(self.filemap.src.char_at(offset))
+ if offset < self.source_text.len() {
+ Some(self.source_text.char_at(offset))
} else {
None
}
pub fn nextnextch(&self) -> Option<char> {
let offset = self.byte_offset(self.pos).to_usize();
- let s = &*self.filemap.src;
+ let s = &self.source_text[..];
if offset >= s.len() { return None }
let str::CharRange { next, .. } = s.char_range_at(offset);
if next < s.len() {
self.span_diagnostic
.span_warn(sp, "\\U00ABCD12 and \\uABCD escapes are deprecated");
self.span_diagnostic
- .span_help(sp, "use \\u{ABCD12} escapes instead");
+ .fileline_help(sp, "use \\u{ABCD12} escapes instead");
}
/// Scan for a single (possibly escaped) byte or char
use ptr::P;
use std::cell::{Cell, RefCell};
-use std::old_io::File;
-use std::rc::Rc;
+use std::fs::File;
+use std::io::Read;
+use std::iter;
use std::num::Int;
+use std::path::{Path, PathBuf};
+use std::rc::Rc;
use std::str;
-use std::iter;
#[macro_use]
pub mod parser;
pub struct ParseSess {
pub span_diagnostic: SpanHandler, // better be the same as the one in the reader!
/// Used to determine and report recursive mod inclusions
- included_mod_stack: RefCell<Vec<Path>>,
+ included_mod_stack: RefCell<Vec<PathBuf>>,
pub node_id: Cell<ast::NodeId>,
}
None => sess.span_diagnostic.handler().fatal(msg),
}
};
- let bytes = match File::open(path).read_to_end() {
- Ok(bytes) => bytes,
+ let mut bytes = Vec::new();
+ match File::open(path).and_then(|mut f| f.read_to_end(&mut bytes)) {
+ Ok(..) => {}
Err(e) => {
- err(&format!("couldn't read {:?}: {}",
- path.display(), e));
- unreachable!()
+ err(&format!("couldn't read {:?}: {}", path.display(), e));
+ unreachable!();
}
};
match str::from_utf8(&bytes[..]).ok() {
Some(s) => {
- return string_to_filemap(sess, s.to_string(),
- path.as_str().unwrap().to_string())
+ string_to_filemap(sess, s.to_string(),
+ path.to_str().unwrap().to_string())
}
None => {
- err(&format!("{:?} is not UTF-8 encoded", path.display()))
+ err(&format!("{:?} is not UTF-8 encoded", path.display()));
+ unreachable!();
}
}
- unreachable!()
}
/// Given a session and a string, add the string to
&suf[1..]));
} else {
sd.span_err(sp, &*format!("illegal suffix `{}` for numeric literal", suf));
- sd.span_help(sp, "the suffix must be one of the integral types \
+ sd.fileline_help(sp, "the suffix must be one of the integral types \
(`u32`, `isize`, etc)");
}
#[cfg(test)]
mod test {
use super::*;
+ use std::rc::Rc;
use serialize::json;
use codemap::{Span, BytePos, Pos, Spanned, NO_EXPANSION};
use owned_slice::OwnedSlice;
}
#[test]
- fn string_to_tts_1 () {
+ fn string_to_tts_1() {
let tts = string_to_tts("fn a (b : i32) { b; }".to_string());
- assert_eq!(json::encode(&tts).unwrap(),
- "[\
- {\
- \"variant\":\"TtToken\",\
- \"fields\":[\
- null,\
- {\
- \"variant\":\"Ident\",\
- \"fields\":[\
- \"fn\",\
- \"Plain\"\
- ]\
- }\
- ]\
- },\
- {\
- \"variant\":\"TtToken\",\
- \"fields\":[\
- null,\
- {\
- \"variant\":\"Ident\",\
- \"fields\":[\
- \"a\",\
- \"Plain\"\
- ]\
- }\
- ]\
- },\
- {\
- \"variant\":\"TtDelimited\",\
- \"fields\":[\
- null,\
- {\
- \"delim\":\"Paren\",\
- \"open_span\":null,\
- \"tts\":[\
- {\
- \"variant\":\"TtToken\",\
- \"fields\":[\
- null,\
- {\
- \"variant\":\"Ident\",\
- \"fields\":[\
- \"b\",\
- \"Plain\"\
- ]\
- }\
- ]\
- },\
- {\
- \"variant\":\"TtToken\",\
- \"fields\":[\
- null,\
- \"Colon\"\
- ]\
- },\
- {\
- \"variant\":\"TtToken\",\
- \"fields\":[\
- null,\
- {\
- \"variant\":\"Ident\",\
- \"fields\":[\
- \"i32\",\
- \"Plain\"\
- ]\
- }\
- ]\
- }\
- ],\
- \"close_span\":null\
- }\
- ]\
- },\
- {\
- \"variant\":\"TtDelimited\",\
- \"fields\":[\
- null,\
- {\
- \"delim\":\"Brace\",\
- \"open_span\":null,\
- \"tts\":[\
- {\
- \"variant\":\"TtToken\",\
- \"fields\":[\
- null,\
- {\
- \"variant\":\"Ident\",\
- \"fields\":[\
- \"b\",\
- \"Plain\"\
- ]\
- }\
- ]\
- },\
- {\
- \"variant\":\"TtToken\",\
- \"fields\":[\
- null,\
- \"Semi\"\
- ]\
- }\
- ],\
- \"close_span\":null\
- }\
- ]\
- }\
-]"
- );
+
+ let expected = vec![
+ ast::TtToken(sp(0, 2),
+ token::Ident(str_to_ident("fn"),
+ token::IdentStyle::Plain)),
+ ast::TtToken(sp(3, 4),
+ token::Ident(str_to_ident("a"),
+ token::IdentStyle::Plain)),
+ ast::TtDelimited(
+ sp(5, 14),
+ Rc::new(ast::Delimited {
+ delim: token::DelimToken::Paren,
+ open_span: sp(5, 6),
+ tts: vec![
+ ast::TtToken(sp(6, 7),
+ token::Ident(str_to_ident("b"),
+ token::IdentStyle::Plain)),
+ ast::TtToken(sp(8, 9),
+ token::Colon),
+ ast::TtToken(sp(10, 13),
+ token::Ident(str_to_ident("i32"),
+ token::IdentStyle::Plain)),
+ ],
+ close_span: sp(13, 14),
+ })),
+ ast::TtDelimited(
+ sp(15, 21),
+ Rc::new(ast::Delimited {
+ delim: token::DelimToken::Brace,
+ open_span: sp(15, 16),
+ tts: vec![
+ ast::TtToken(sp(17, 18),
+ token::Ident(str_to_ident("b"),
+ token::IdentStyle::Plain)),
+ ast::TtToken(sp(18, 19),
+ token::Semi)
+ ],
+ close_span: sp(20, 21),
+ }))
+ ];
+
+ assert_eq!(tts, expected);
}
#[test] fn ret_expr() {
use owned_slice::OwnedSlice;
use std::collections::HashSet;
-use std::old_io::fs::PathExtensions;
+use std::io::prelude::*;
use std::iter;
use std::mem;
use std::num::Float;
+use std::path::{Path, PathBuf};
use std::rc::Rc;
use std::slice;
}
pub fn span_fatal_help(&self, sp: Span, m: &str, help: &str) -> ! {
self.span_err(sp, m);
- self.span_help(sp, help);
+ self.fileline_help(sp, help);
panic!(diagnostic::FatalError);
}
pub fn span_note(&self, sp: Span, m: &str) {
pub fn span_help(&self, sp: Span, m: &str) {
self.sess.span_diagnostic.span_help(sp, m)
}
+ pub fn fileline_help(&self, sp: Span, m: &str) {
+ self.sess.span_diagnostic.fileline_help(sp, m)
+ }
pub fn bug(&self, m: &str) -> ! {
self.sess.span_diagnostic.span_bug(self.span, m)
}
Some(f) => f,
None => continue,
};
- self.span_help(last_span,
+ self.fileline_help(last_span,
&format!("try parenthesizing the first index; e.g., `(foo.{}){}`",
float.trunc() as usize,
&float.fract().to_string()[1..]));
self.span_err(op_span,
"chained comparison operators require parentheses");
if op.node == BiLt && outer_op == BiGt {
- self.span_help(op_span,
+ self.fileline_help(op_span,
"use `::<...>` instead of `<...>` if you meant to specify type arguments");
}
}
match visa {
Public => {
self.span_err(span, "can't qualify macro invocation with `pub`");
- self.span_help(span, "try adjusting the macro to put `pub` inside \
+ self.fileline_help(span, "try adjusting the macro to put `pub` inside \
the invocation");
}
Inherited => (),
outer_attrs: &[ast::Attribute],
id_sp: Span)
-> (ast::Item_, Vec<ast::Attribute> ) {
- let mut prefix = Path::new(self.sess.span_diagnostic.cm.span_to_filename(self.span));
- prefix.pop();
- let mod_path = Path::new(".").join_many(&self.mod_path_stack);
- let dir_path = prefix.join(&mod_path);
+ let mut prefix = PathBuf::new(&self.sess.span_diagnostic.cm
+ .span_to_filename(self.span));
+ // FIXME(acrichto): right now "a".pop() == "a", but need to confirm with
+ // aturon whether this is expected or not.
+ if prefix.parent().is_some() {
+ prefix.pop();
+ } else {
+ prefix = PathBuf::new("");
+ }
+ let mut dir_path = prefix;
+ for part in &self.mod_path_stack {
+ dir_path.push(&**part);
+ }
let mod_string = token::get_ident(id);
let (file_path, owns_directory) = match ::attr::first_attr_value_str_by_name(
outer_attrs, "path") {
- Some(d) => (dir_path.join(d), true),
+ Some(d) => (dir_path.join(&*d), true),
None => {
let mod_name = mod_string.to_string();
let default_path_str = format!("{}.rs", mod_name);
}
fn eval_src_mod_from_path(&mut self,
- path: Path,
+ path: PathBuf,
owns_directory: bool,
name: String,
id_sp: Span) -> (ast::Item_, Vec<ast::Attribute> ) {
let mut err = String::from_str("circular modules: ");
let len = included_mod_stack.len();
for p in &included_mod_stack[i.. len] {
- err.push_str(&p.display().as_cow());
+ err.push_str(&p.to_string_lossy());
err.push_str(" -> ");
}
- err.push_str(&path.display().as_cow());
+ err.push_str(&path.to_string_lossy());
self.span_fatal(id_sp, &err[..]);
}
None => ()
if self.token.is_ident() { self.bump(); }
self.span_err(span, "expected `;`, found `as`");
- self.span_help(span,
+ self.fileline_help(span,
&format!("perhaps you meant to enclose the crate name `{}` in \
a string?",
the_ident.as_str()));
if self.eat_keyword(keywords::Mut) {
let last_span = self.last_span;
self.span_err(last_span, "const globals cannot be mutable");
- self.span_help(last_span, "did you mean to declare a static?");
+ self.fileline_help(last_span, "did you mean to declare a static?");
}
let (ident, item_, extra_attrs) = self.parse_item_const(None);
let last_span = self.last_span;
//! line (which it can't) and so naturally place the content on its own line to
//! avoid combining it with other lines and making matters even worse.
-use std::old_io;
+use std::io;
use std::string;
use std::iter::repeat;
const SIZE_INFINITY: isize = 0xffff;
-pub fn mk_printer(out: Box<old_io::Writer+'static>, linewidth: usize) -> Printer {
+pub fn mk_printer<'a>(out: Box<io::Write+'a>, linewidth: usize) -> Printer<'a> {
// Yes 3, it makes the ring buffers big enough to never
// fall behind.
let n: usize = 3 * linewidth;
/// In this implementation (following the paper, again) the SCAN process is
/// the method called 'pretty_print', and the 'PRINT' process is the method
/// called 'print'.
-pub struct Printer {
- pub out: Box<old_io::Writer+'static>,
+pub struct Printer<'a> {
+ pub out: Box<io::Write+'a>,
buf_len: usize,
/// Width of lines we're constrained to
margin: isize,
pending_indentation: isize,
}
-impl Printer {
+impl<'a> Printer<'a> {
pub fn last_token(&mut self) -> Token {
self.token[self.right].clone()
}
pub fn replace_last_token(&mut self, t: Token) {
self.token[self.right] = t;
}
- pub fn pretty_print(&mut self, token: Token) -> old_io::IoResult<()> {
+ pub fn pretty_print(&mut self, token: Token) -> io::Result<()> {
debug!("pp ~[{},{}]", self.left, self.right);
match token {
Token::Eof => {
}
}
}
- pub fn check_stream(&mut self) -> old_io::IoResult<()> {
+ pub fn check_stream(&mut self) -> io::Result<()> {
debug!("check_stream ~[{}, {}] with left_total={}, right_total={}",
self.left, self.right, self.left_total, self.right_total);
if self.right_total - self.left_total > self.space {
self.right %= self.buf_len;
assert!((self.right != self.left));
}
- pub fn advance_left(&mut self) -> old_io::IoResult<()> {
+ pub fn advance_left(&mut self) -> io::Result<()> {
debug!("advance_left ~[{},{}], sizeof({})={}", self.left, self.right,
self.left, self.size[self.left]);
}
}
}
- pub fn print_newline(&mut self, amount: isize) -> old_io::IoResult<()> {
+ pub fn print_newline(&mut self, amount: isize) -> io::Result<()> {
debug!("NEWLINE {}", amount);
let ret = write!(self.out, "\n");
self.pending_indentation = 0;
}
}
}
- pub fn print_str(&mut self, s: &str) -> old_io::IoResult<()> {
+ pub fn print_str(&mut self, s: &str) -> io::Result<()> {
while self.pending_indentation > 0 {
try!(write!(self.out, " "));
self.pending_indentation -= 1;
}
write!(self.out, "{}", s)
}
- pub fn print(&mut self, token: Token, l: isize) -> old_io::IoResult<()> {
+ pub fn print(&mut self, token: Token, l: isize) -> io::Result<()> {
debug!("print {} {} (remaining line space={})", tok_str(&token), l,
self.space);
debug!("{}", buf_str(&self.token,
// Convenience functions to talk to the printer.
//
// "raw box"
-pub fn rbox(p: &mut Printer, indent: usize, b: Breaks) -> old_io::IoResult<()> {
+pub fn rbox(p: &mut Printer, indent: usize, b: Breaks) -> io::Result<()> {
p.pretty_print(Token::Begin(BeginToken {
offset: indent as isize,
breaks: b
}))
}
-pub fn ibox(p: &mut Printer, indent: usize) -> old_io::IoResult<()> {
+pub fn ibox(p: &mut Printer, indent: usize) -> io::Result<()> {
rbox(p, indent, Breaks::Inconsistent)
}
-pub fn cbox(p: &mut Printer, indent: usize) -> old_io::IoResult<()> {
+pub fn cbox(p: &mut Printer, indent: usize) -> io::Result<()> {
rbox(p, indent, Breaks::Consistent)
}
-pub fn break_offset(p: &mut Printer, n: usize, off: isize) -> old_io::IoResult<()> {
+pub fn break_offset(p: &mut Printer, n: usize, off: isize) -> io::Result<()> {
p.pretty_print(Token::Break(BreakToken {
offset: off,
blank_space: n as isize
}))
}
-pub fn end(p: &mut Printer) -> old_io::IoResult<()> {
+pub fn end(p: &mut Printer) -> io::Result<()> {
p.pretty_print(Token::End)
}
-pub fn eof(p: &mut Printer) -> old_io::IoResult<()> {
+pub fn eof(p: &mut Printer) -> io::Result<()> {
p.pretty_print(Token::Eof)
}
-pub fn word(p: &mut Printer, wrd: &str) -> old_io::IoResult<()> {
+pub fn word(p: &mut Printer, wrd: &str) -> io::Result<()> {
p.pretty_print(Token::String(/* bad */ wrd.to_string(), wrd.len() as isize))
}
-pub fn huge_word(p: &mut Printer, wrd: &str) -> old_io::IoResult<()> {
+pub fn huge_word(p: &mut Printer, wrd: &str) -> io::Result<()> {
p.pretty_print(Token::String(/* bad */ wrd.to_string(), SIZE_INFINITY))
}
-pub fn zero_word(p: &mut Printer, wrd: &str) -> old_io::IoResult<()> {
+pub fn zero_word(p: &mut Printer, wrd: &str) -> io::Result<()> {
p.pretty_print(Token::String(/* bad */ wrd.to_string(), 0))
}
-pub fn spaces(p: &mut Printer, n: usize) -> old_io::IoResult<()> {
+pub fn spaces(p: &mut Printer, n: usize) -> io::Result<()> {
break_offset(p, n, 0)
}
-pub fn zerobreak(p: &mut Printer) -> old_io::IoResult<()> {
+pub fn zerobreak(p: &mut Printer) -> io::Result<()> {
spaces(p, 0)
}
-pub fn space(p: &mut Printer) -> old_io::IoResult<()> {
+pub fn space(p: &mut Printer) -> io::Result<()> {
spaces(p, 1)
}
-pub fn hardbreak(p: &mut Printer) -> old_io::IoResult<()> {
+pub fn hardbreak(p: &mut Printer) -> io::Result<()> {
spaces(p, SIZE_INFINITY as usize)
}
use std_inject;
use std::{ascii, mem};
-use std::old_io::{self, IoResult};
+use std::io::{self, Write, Read};
use std::iter;
pub enum AnnNode<'a> {
}
pub trait PpAnn {
- fn pre(&self, _state: &mut State, _node: AnnNode) -> IoResult<()> { Ok(()) }
- fn post(&self, _state: &mut State, _node: AnnNode) -> IoResult<()> { Ok(()) }
+ fn pre(&self, _state: &mut State, _node: AnnNode) -> io::Result<()> { Ok(()) }
+ fn post(&self, _state: &mut State, _node: AnnNode) -> io::Result<()> { Ok(()) }
}
#[derive(Copy)]
}
pub struct State<'a> {
- pub s: pp::Printer,
+ pub s: pp::Printer<'a>,
cm: Option<&'a CodeMap>,
comments: Option<Vec<comments::Comment> >,
literals: Option<Vec<comments::Literal> >,
encode_idents_with_hygiene: bool,
}
-pub fn rust_printer(writer: Box<old_io::Writer+'static>) -> State<'static> {
+pub fn rust_printer<'a>(writer: Box<Write+'a>) -> State<'a> {
static NO_ANN: NoAnn = NoAnn;
rust_printer_annotated(writer, &NO_ANN)
}
-pub fn rust_printer_annotated<'a>(writer: Box<old_io::Writer+'static>,
+pub fn rust_printer_annotated<'a>(writer: Box<Write+'a>,
ann: &'a PpAnn) -> State<'a> {
State {
s: pp::mk_printer(writer, default_columns),
span_diagnostic: &diagnostic::SpanHandler,
krate: &ast::Crate,
filename: String,
- input: &mut old_io::Reader,
- out: Box<old_io::Writer+'static>,
+ input: &mut Read,
+ out: Box<Write+'a>,
ann: &'a PpAnn,
- is_expanded: bool) -> IoResult<()> {
+ is_expanded: bool) -> io::Result<()> {
let mut s = State::new_from_input(cm,
span_diagnostic,
filename,
pub fn new_from_input(cm: &'a CodeMap,
span_diagnostic: &diagnostic::SpanHandler,
filename: String,
- input: &mut old_io::Reader,
- out: Box<old_io::Writer+'static>,
+ input: &mut Read,
+ out: Box<Write+'a>,
ann: &'a PpAnn,
is_expanded: bool) -> State<'a> {
let (cmnts, lits) = comments::gather_comments_and_literals(
}
pub fn new(cm: &'a CodeMap,
- out: Box<old_io::Writer+'static>,
+ out: Box<Write+'a>,
ann: &'a PpAnn,
comments: Option<Vec<comments::Comment>>,
literals: Option<Vec<comments::Literal>>) -> State<'a> {
}
pub fn to_string<F>(f: F) -> String where
- F: FnOnce(&mut State) -> IoResult<()>,
+ F: FnOnce(&mut State) -> io::Result<()>,
{
use std::raw::TraitObject;
let mut s = rust_printer(box Vec::new());
f(&mut s).unwrap();
eof(&mut s.s).unwrap();
let wr = unsafe {
- // FIXME(pcwalton): A nasty function to extract the string from an `old_io::Writer`
+ // FIXME(pcwalton): A nasty function to extract the string from a `Write`
// that we "know" to be a `Vec<u8>` that works around the lack of checked
// downcasts.
let obj: &TraitObject = mem::transmute(&s.s.out);
pub mod with_hygiene {
use abi;
use ast;
- use std::old_io::IoResult;
+ use std::io;
use super::indent_unit;
// This function is the trick that all the rest of the routines
// hang on.
pub fn to_string_hyg<F>(f: F) -> String where
- F: FnOnce(&mut super::State) -> IoResult<()>,
+ F: FnOnce(&mut super::State) -> io::Result<()>,
{
super::to_string(move |s| {
s.encode_idents_with_hygiene = true;
}
impl<'a> State<'a> {
- pub fn ibox(&mut self, u: usize) -> IoResult<()> {
+ pub fn ibox(&mut self, u: usize) -> io::Result<()> {
self.boxes.push(pp::Breaks::Inconsistent);
pp::ibox(&mut self.s, u)
}
- pub fn end(&mut self) -> IoResult<()> {
+ pub fn end(&mut self) -> io::Result<()> {
self.boxes.pop().unwrap();
pp::end(&mut self.s)
}
- pub fn cbox(&mut self, u: usize) -> IoResult<()> {
+ pub fn cbox(&mut self, u: usize) -> io::Result<()> {
self.boxes.push(pp::Breaks::Consistent);
pp::cbox(&mut self.s, u)
}
// "raw box"
- pub fn rbox(&mut self, u: usize, b: pp::Breaks) -> IoResult<()> {
+ pub fn rbox(&mut self, u: usize, b: pp::Breaks) -> io::Result<()> {
self.boxes.push(b);
pp::rbox(&mut self.s, u, b)
}
- pub fn nbsp(&mut self) -> IoResult<()> { word(&mut self.s, " ") }
+ pub fn nbsp(&mut self) -> io::Result<()> { word(&mut self.s, " ") }
- pub fn word_nbsp(&mut self, w: &str) -> IoResult<()> {
+ pub fn word_nbsp(&mut self, w: &str) -> io::Result<()> {
try!(word(&mut self.s, w));
self.nbsp()
}
- pub fn word_space(&mut self, w: &str) -> IoResult<()> {
+ pub fn word_space(&mut self, w: &str) -> io::Result<()> {
try!(word(&mut self.s, w));
space(&mut self.s)
}
- pub fn popen(&mut self) -> IoResult<()> { word(&mut self.s, "(") }
+ pub fn popen(&mut self) -> io::Result<()> { word(&mut self.s, "(") }
- pub fn pclose(&mut self) -> IoResult<()> { word(&mut self.s, ")") }
+ pub fn pclose(&mut self) -> io::Result<()> { word(&mut self.s, ")") }
- pub fn head(&mut self, w: &str) -> IoResult<()> {
+ pub fn head(&mut self, w: &str) -> io::Result<()> {
// outer-box is consistent
try!(self.cbox(indent_unit));
// head-box is inconsistent
Ok(())
}
- pub fn bopen(&mut self) -> IoResult<()> {
+ pub fn bopen(&mut self) -> io::Result<()> {
try!(word(&mut self.s, "{"));
self.end() // close the head-box
}
pub fn bclose_(&mut self, span: codemap::Span,
- indented: usize) -> IoResult<()> {
+ indented: usize) -> io::Result<()> {
self.bclose_maybe_open(span, indented, true)
}
pub fn bclose_maybe_open (&mut self, span: codemap::Span,
- indented: usize, close_box: bool) -> IoResult<()> {
+ indented: usize, close_box: bool) -> io::Result<()> {
try!(self.maybe_print_comment(span.hi));
try!(self.break_offset_if_not_bol(1, -(indented as isize)));
try!(word(&mut self.s, "}"));
}
Ok(())
}
- pub fn bclose(&mut self, span: codemap::Span) -> IoResult<()> {
+ pub fn bclose(&mut self, span: codemap::Span) -> io::Result<()> {
self.bclose_(span, indent_unit)
}
}
}
- pub fn hardbreak_if_not_bol(&mut self) -> IoResult<()> {
+ pub fn hardbreak_if_not_bol(&mut self) -> io::Result<()> {
if !self.is_bol() {
try!(hardbreak(&mut self.s))
}
Ok(())
}
- pub fn space_if_not_bol(&mut self) -> IoResult<()> {
+ pub fn space_if_not_bol(&mut self) -> io::Result<()> {
if !self.is_bol() { try!(space(&mut self.s)); }
Ok(())
}
pub fn break_offset_if_not_bol(&mut self, n: usize,
- off: isize) -> IoResult<()> {
+ off: isize) -> io::Result<()> {
if !self.is_bol() {
break_offset(&mut self.s, n, off)
} else {
// Synthesizes a comment that was not textually present in the original source
// file.
- pub fn synth_comment(&mut self, text: String) -> IoResult<()> {
+ pub fn synth_comment(&mut self, text: String) -> io::Result<()> {
try!(word(&mut self.s, "/*"));
try!(space(&mut self.s));
try!(word(&mut self.s, &text[..]));
word(&mut self.s, "*/")
}
- pub fn commasep<T, F>(&mut self, b: Breaks, elts: &[T], mut op: F) -> IoResult<()> where
- F: FnMut(&mut State, &T) -> IoResult<()>,
+ pub fn commasep<T, F>(&mut self, b: Breaks, elts: &[T], mut op: F) -> io::Result<()> where
+ F: FnMut(&mut State, &T) -> io::Result<()>,
{
try!(self.rbox(0, b));
let mut first = true;
b: Breaks,
elts: &[T],
mut op: F,
- mut get_span: G) -> IoResult<()> where
- F: FnMut(&mut State, &T) -> IoResult<()>,
+ mut get_span: G) -> io::Result<()> where
+ F: FnMut(&mut State, &T) -> io::Result<()>,
G: FnMut(&T) -> codemap::Span,
{
try!(self.rbox(0, b));
}
pub fn commasep_exprs(&mut self, b: Breaks,
- exprs: &[P<ast::Expr>]) -> IoResult<()> {
+ exprs: &[P<ast::Expr>]) -> io::Result<()> {
self.commasep_cmnt(b, exprs, |s, e| s.print_expr(&**e), |e| e.span)
}
pub fn print_mod(&mut self, _mod: &ast::Mod,
- attrs: &[ast::Attribute]) -> IoResult<()> {
+ attrs: &[ast::Attribute]) -> io::Result<()> {
try!(self.print_inner_attributes(attrs));
for item in &_mod.items {
try!(self.print_item(&**item));
}
pub fn print_foreign_mod(&mut self, nmod: &ast::ForeignMod,
- attrs: &[ast::Attribute]) -> IoResult<()> {
+ attrs: &[ast::Attribute]) -> io::Result<()> {
try!(self.print_inner_attributes(attrs));
for item in &nmod.items {
try!(self.print_foreign_item(&**item));
}
pub fn print_opt_lifetime(&mut self,
- lifetime: &Option<ast::Lifetime>) -> IoResult<()> {
+ lifetime: &Option<ast::Lifetime>) -> io::Result<()> {
if let Some(l) = *lifetime {
try!(self.print_lifetime(&l));
try!(self.nbsp());
Ok(())
}
- pub fn print_type(&mut self, ty: &ast::Ty) -> IoResult<()> {
+ pub fn print_type(&mut self, ty: &ast::Ty) -> io::Result<()> {
try!(self.maybe_print_comment(ty.span.lo));
try!(self.ibox(0));
match ty.node {
}
pub fn print_foreign_item(&mut self,
- item: &ast::ForeignItem) -> IoResult<()> {
+ item: &ast::ForeignItem) -> io::Result<()> {
try!(self.hardbreak_if_not_bol());
try!(self.maybe_print_comment(item.span.lo));
try!(self.print_outer_attributes(&item.attrs));
}
fn print_associated_type(&mut self, typedef: &ast::AssociatedType)
- -> IoResult<()>
+ -> io::Result<()>
{
try!(self.print_outer_attributes(&typedef.attrs));
try!(self.word_space("type"));
word(&mut self.s, ";")
}
- fn print_typedef(&mut self, typedef: &ast::Typedef) -> IoResult<()> {
+ fn print_typedef(&mut self, typedef: &ast::Typedef) -> io::Result<()> {
try!(self.word_space("type"));
try!(self.print_ident(typedef.ident));
try!(space(&mut self.s));
}
/// Pretty-print an item
- pub fn print_item(&mut self, item: &ast::Item) -> IoResult<()> {
+ pub fn print_item(&mut self, item: &ast::Item) -> io::Result<()> {
try!(self.hardbreak_if_not_bol());
try!(self.maybe_print_comment(item.span.lo));
try!(self.print_outer_attributes(&item.attrs));
self.ann.post(self, NodeItem(item))
}
- fn print_trait_ref(&mut self, t: &ast::TraitRef) -> IoResult<()> {
+ fn print_trait_ref(&mut self, t: &ast::TraitRef) -> io::Result<()> {
self.print_path(&t.path, false, 0)
}
- fn print_formal_lifetime_list(&mut self, lifetimes: &[ast::LifetimeDef]) -> IoResult<()> {
+ fn print_formal_lifetime_list(&mut self, lifetimes: &[ast::LifetimeDef]) -> io::Result<()> {
if !lifetimes.is_empty() {
try!(word(&mut self.s, "for<"));
let mut comma = false;
Ok(())
}
- fn print_poly_trait_ref(&mut self, t: &ast::PolyTraitRef) -> IoResult<()> {
+ fn print_poly_trait_ref(&mut self, t: &ast::PolyTraitRef) -> io::Result<()> {
try!(self.print_formal_lifetime_list(&t.bound_lifetimes));
self.print_trait_ref(&t.trait_ref)
}
pub fn print_enum_def(&mut self, enum_definition: &ast::EnumDef,
generics: &ast::Generics, ident: ast::Ident,
span: codemap::Span,
- visibility: ast::Visibility) -> IoResult<()> {
+ visibility: ast::Visibility) -> io::Result<()> {
try!(self.head(&visibility_qualified(visibility, "enum")));
try!(self.print_ident(ident));
try!(self.print_generics(generics));
pub fn print_variants(&mut self,
variants: &[P<ast::Variant>],
- span: codemap::Span) -> IoResult<()> {
+ span: codemap::Span) -> io::Result<()> {
try!(self.bopen());
for v in variants {
try!(self.space_if_not_bol());
self.bclose(span)
}
- pub fn print_visibility(&mut self, vis: ast::Visibility) -> IoResult<()> {
+ pub fn print_visibility(&mut self, vis: ast::Visibility) -> io::Result<()> {
match vis {
ast::Public => self.word_nbsp("pub"),
ast::Inherited => Ok(())
struct_def: &ast::StructDef,
generics: &ast::Generics,
ident: ast::Ident,
- span: codemap::Span) -> IoResult<()> {
+ span: codemap::Span) -> io::Result<()> {
try!(self.print_ident(ident));
try!(self.print_generics(generics));
if ast_util::struct_def_is_tuple_like(struct_def) {
/// appropriate macro, transcribe back into the grammar we just parsed from,
/// and then pretty-print the resulting AST nodes (so, e.g., we print
/// expression arguments as expressions). It can be done! I think.
- pub fn print_tt(&mut self, tt: &ast::TokenTree) -> IoResult<()> {
+ pub fn print_tt(&mut self, tt: &ast::TokenTree) -> io::Result<()> {
match *tt {
ast::TtToken(_, ref tk) => {
try!(word(&mut self.s, &token_to_string(tk)));
}
}
- pub fn print_tts(&mut self, tts: &[ast::TokenTree]) -> IoResult<()> {
+ pub fn print_tts(&mut self, tts: &[ast::TokenTree]) -> io::Result<()> {
try!(self.ibox(0));
let mut suppress_space = false;
for (i, tt) in tts.iter().enumerate() {
self.end()
}
- pub fn print_variant(&mut self, v: &ast::Variant) -> IoResult<()> {
+ pub fn print_variant(&mut self, v: &ast::Variant) -> io::Result<()> {
try!(self.print_visibility(v.node.vis));
match v.node.kind {
ast::TupleVariantKind(ref args) => {
}
}
- pub fn print_ty_method(&mut self, m: &ast::TypeMethod) -> IoResult<()> {
+ pub fn print_ty_method(&mut self, m: &ast::TypeMethod) -> io::Result<()> {
try!(self.hardbreak_if_not_bol());
try!(self.maybe_print_comment(m.span.lo));
try!(self.print_outer_attributes(&m.attrs));
}
pub fn print_trait_method(&mut self,
- m: &ast::TraitItem) -> IoResult<()> {
+ m: &ast::TraitItem) -> io::Result<()> {
match *m {
RequiredMethod(ref ty_m) => self.print_ty_method(ty_m),
ProvidedMethod(ref m) => self.print_method(&**m),
}
}
- pub fn print_impl_item(&mut self, ii: &ast::ImplItem) -> IoResult<()> {
+ pub fn print_impl_item(&mut self, ii: &ast::ImplItem) -> io::Result<()> {
match *ii {
MethodImplItem(ref m) => self.print_method(&**m),
TypeImplItem(ref td) => self.print_typedef(&**td),
}
}
- pub fn print_method(&mut self, meth: &ast::Method) -> IoResult<()> {
+ pub fn print_method(&mut self, meth: &ast::Method) -> io::Result<()> {
try!(self.hardbreak_if_not_bol());
try!(self.maybe_print_comment(meth.span.lo));
try!(self.print_outer_attributes(&meth.attrs));
}
pub fn print_outer_attributes(&mut self,
- attrs: &[ast::Attribute]) -> IoResult<()> {
+ attrs: &[ast::Attribute]) -> io::Result<()> {
let mut count = 0;
for attr in attrs {
match attr.node.style {
}
pub fn print_inner_attributes(&mut self,
- attrs: &[ast::Attribute]) -> IoResult<()> {
+ attrs: &[ast::Attribute]) -> io::Result<()> {
let mut count = 0;
for attr in attrs {
match attr.node.style {
Ok(())
}
- pub fn print_attribute(&mut self, attr: &ast::Attribute) -> IoResult<()> {
+ pub fn print_attribute(&mut self, attr: &ast::Attribute) -> io::Result<()> {
try!(self.hardbreak_if_not_bol());
try!(self.maybe_print_comment(attr.span.lo));
if attr.node.is_sugared_doc {
}
- pub fn print_stmt(&mut self, st: &ast::Stmt) -> IoResult<()> {
+ pub fn print_stmt(&mut self, st: &ast::Stmt) -> io::Result<()> {
try!(self.maybe_print_comment(st.span.lo));
match st.node {
ast::StmtDecl(ref decl, _) => {
self.maybe_print_trailing_comment(st.span, None)
}
- pub fn print_block(&mut self, blk: &ast::Block) -> IoResult<()> {
+ pub fn print_block(&mut self, blk: &ast::Block) -> io::Result<()> {
self.print_block_with_attrs(blk, &[])
}
- pub fn print_block_unclosed(&mut self, blk: &ast::Block) -> IoResult<()> {
+ pub fn print_block_unclosed(&mut self, blk: &ast::Block) -> io::Result<()> {
self.print_block_unclosed_indent(blk, indent_unit)
}
pub fn print_block_unclosed_indent(&mut self, blk: &ast::Block,
- indented: usize) -> IoResult<()> {
+ indented: usize) -> io::Result<()> {
self.print_block_maybe_unclosed(blk, indented, &[], false)
}
pub fn print_block_with_attrs(&mut self,
blk: &ast::Block,
- attrs: &[ast::Attribute]) -> IoResult<()> {
+ attrs: &[ast::Attribute]) -> io::Result<()> {
self.print_block_maybe_unclosed(blk, indent_unit, attrs, true)
}
blk: &ast::Block,
indented: usize,
attrs: &[ast::Attribute],
- close_box: bool) -> IoResult<()> {
+ close_box: bool) -> io::Result<()> {
match blk.rules {
ast::UnsafeBlock(..) => try!(self.word_space("unsafe")),
ast::DefaultBlock => ()
self.ann.post(self, NodeBlock(blk))
}
- fn print_else(&mut self, els: Option<&ast::Expr>) -> IoResult<()> {
+ fn print_else(&mut self, els: Option<&ast::Expr>) -> io::Result<()> {
match els {
Some(_else) => {
match _else.node {
}
pub fn print_if(&mut self, test: &ast::Expr, blk: &ast::Block,
- elseopt: Option<&ast::Expr>) -> IoResult<()> {
+ elseopt: Option<&ast::Expr>) -> io::Result<()> {
try!(self.head("if"));
try!(self.print_expr(test));
try!(space(&mut self.s));
}
pub fn print_if_let(&mut self, pat: &ast::Pat, expr: &ast::Expr, blk: &ast::Block,
- elseopt: Option<&ast::Expr>) -> IoResult<()> {
+ elseopt: Option<&ast::Expr>) -> io::Result<()> {
try!(self.head("if let"));
try!(self.print_pat(pat));
try!(space(&mut self.s));
}
pub fn print_mac(&mut self, m: &ast::Mac, delim: token::DelimToken)
- -> IoResult<()> {
+ -> io::Result<()> {
match m.node {
// I think it's reasonable to hide the ctxt here:
ast::MacInvocTT(ref pth, ref tts, _) => {
}
- fn print_call_post(&mut self, args: &[P<ast::Expr>]) -> IoResult<()> {
+ fn print_call_post(&mut self, args: &[P<ast::Expr>]) -> io::Result<()> {
try!(self.popen());
try!(self.commasep_exprs(Inconsistent, args));
self.pclose()
}
- pub fn print_expr_maybe_paren(&mut self, expr: &ast::Expr) -> IoResult<()> {
+ pub fn print_expr_maybe_paren(&mut self, expr: &ast::Expr) -> io::Result<()> {
let needs_par = needs_parentheses(expr);
if needs_par {
try!(self.popen());
fn print_expr_box(&mut self,
place: &Option<P<ast::Expr>>,
- expr: &ast::Expr) -> IoResult<()> {
+ expr: &ast::Expr) -> io::Result<()> {
try!(word(&mut self.s, "box"));
try!(word(&mut self.s, "("));
try!(place.as_ref().map_or(Ok(()), |e|self.print_expr(&**e)));
self.print_expr(expr)
}
- fn print_expr_vec(&mut self, exprs: &[P<ast::Expr>]) -> IoResult<()> {
+ fn print_expr_vec(&mut self, exprs: &[P<ast::Expr>]) -> io::Result<()> {
try!(self.ibox(indent_unit));
try!(word(&mut self.s, "["));
try!(self.commasep_exprs(Inconsistent, &exprs[..]));
fn print_expr_repeat(&mut self,
element: &ast::Expr,
- count: &ast::Expr) -> IoResult<()> {
+ count: &ast::Expr) -> io::Result<()> {
try!(self.ibox(indent_unit));
try!(word(&mut self.s, "["));
try!(self.print_expr(element));
fn print_expr_struct(&mut self,
path: &ast::Path,
fields: &[ast::Field],
- wth: &Option<P<ast::Expr>>) -> IoResult<()> {
+ wth: &Option<P<ast::Expr>>) -> io::Result<()> {
try!(self.print_path(path, true, 0));
if !(fields.is_empty() && wth.is_none()) {
try!(word(&mut self.s, "{"));
Ok(())
}
- fn print_expr_tup(&mut self, exprs: &[P<ast::Expr>]) -> IoResult<()> {
+ fn print_expr_tup(&mut self, exprs: &[P<ast::Expr>]) -> io::Result<()> {
try!(self.popen());
try!(self.commasep_exprs(Inconsistent, &exprs[..]));
if exprs.len() == 1 {
fn print_expr_call(&mut self,
func: &ast::Expr,
- args: &[P<ast::Expr>]) -> IoResult<()> {
+ args: &[P<ast::Expr>]) -> io::Result<()> {
try!(self.print_expr_maybe_paren(func));
self.print_call_post(args)
}
fn print_expr_method_call(&mut self,
ident: ast::SpannedIdent,
tys: &[P<ast::Ty>],
- args: &[P<ast::Expr>]) -> IoResult<()> {
+ args: &[P<ast::Expr>]) -> io::Result<()> {
let base_args = &args[1..];
try!(self.print_expr(&*args[0]));
try!(word(&mut self.s, "."));
fn print_expr_binary(&mut self,
op: ast::BinOp,
lhs: &ast::Expr,
- rhs: &ast::Expr) -> IoResult<()> {
+ rhs: &ast::Expr) -> io::Result<()> {
try!(self.print_expr(lhs));
try!(space(&mut self.s));
try!(self.word_space(ast_util::binop_to_string(op.node)));
fn print_expr_unary(&mut self,
op: ast::UnOp,
- expr: &ast::Expr) -> IoResult<()> {
+ expr: &ast::Expr) -> io::Result<()> {
try!(word(&mut self.s, ast_util::unop_to_string(op)));
self.print_expr_maybe_paren(expr)
}
fn print_expr_addr_of(&mut self,
mutability: ast::Mutability,
- expr: &ast::Expr) -> IoResult<()> {
+ expr: &ast::Expr) -> io::Result<()> {
try!(word(&mut self.s, "&"));
try!(self.print_mutability(mutability));
self.print_expr_maybe_paren(expr)
}
- pub fn print_expr(&mut self, expr: &ast::Expr) -> IoResult<()> {
+ pub fn print_expr(&mut self, expr: &ast::Expr) -> io::Result<()> {
try!(self.maybe_print_comment(expr.span.lo));
try!(self.ibox(indent_unit));
try!(self.ann.pre(self, NodeExpr(expr)));
self.end()
}
- pub fn print_local_decl(&mut self, loc: &ast::Local) -> IoResult<()> {
+ pub fn print_local_decl(&mut self, loc: &ast::Local) -> io::Result<()> {
try!(self.print_pat(&*loc.pat));
if let Some(ref ty) = loc.ty {
try!(self.word_space(":"));
Ok(())
}
- pub fn print_decl(&mut self, decl: &ast::Decl) -> IoResult<()> {
+ pub fn print_decl(&mut self, decl: &ast::Decl) -> io::Result<()> {
try!(self.maybe_print_comment(decl.span.lo));
match decl.node {
ast::DeclLocal(ref loc) => {
}
}
- pub fn print_ident(&mut self, ident: ast::Ident) -> IoResult<()> {
+ pub fn print_ident(&mut self, ident: ast::Ident) -> io::Result<()> {
if self.encode_idents_with_hygiene {
let encoded = ident.encode_with_hygiene();
try!(word(&mut self.s, &encoded[..]))
self.ann.post(self, NodeIdent(&ident))
}
- pub fn print_usize(&mut self, i: usize) -> IoResult<()> {
+ pub fn print_usize(&mut self, i: usize) -> io::Result<()> {
word(&mut self.s, &i.to_string())
}
- pub fn print_name(&mut self, name: ast::Name) -> IoResult<()> {
+ pub fn print_name(&mut self, name: ast::Name) -> io::Result<()> {
try!(word(&mut self.s, &token::get_name(name)));
self.ann.post(self, NodeName(&name))
}
pub fn print_for_decl(&mut self, loc: &ast::Local,
- coll: &ast::Expr) -> IoResult<()> {
+ coll: &ast::Expr) -> io::Result<()> {
try!(self.print_local_decl(loc));
try!(space(&mut self.s));
try!(self.word_space("in"));
path: &ast::Path,
colons_before_params: bool,
depth: usize)
- -> IoResult<()>
+ -> io::Result<()>
{
try!(self.maybe_print_comment(path.span.lo));
path: &ast::Path,
qself: &ast::QSelf,
colons_before_params: bool)
- -> IoResult<()>
+ -> io::Result<()>
{
try!(word(&mut self.s, "<"));
try!(self.print_type(&qself.ty));
fn print_path_parameters(&mut self,
parameters: &ast::PathParameters,
colons_before_params: bool)
- -> IoResult<()>
+ -> io::Result<()>
{
if parameters.is_empty() {
return Ok(());
Ok(())
}
- pub fn print_pat(&mut self, pat: &ast::Pat) -> IoResult<()> {
+ pub fn print_pat(&mut self, pat: &ast::Pat) -> io::Result<()> {
try!(self.maybe_print_comment(pat.span.lo));
try!(self.ann.pre(self, NodePat(pat)));
/* Pat isn't normalized, but the beauty of it
self.ann.post(self, NodePat(pat))
}
- fn print_arm(&mut self, arm: &ast::Arm) -> IoResult<()> {
+ fn print_arm(&mut self, arm: &ast::Arm) -> io::Result<()> {
// I have no idea why this check is necessary, but here it
// is :(
if arm.attrs.is_empty() {
// Returns whether it printed anything
fn print_explicit_self(&mut self,
explicit_self: &ast::ExplicitSelf_,
- mutbl: ast::Mutability) -> IoResult<bool> {
+ mutbl: ast::Mutability) -> io::Result<bool> {
try!(self.print_mutability(mutbl));
match *explicit_self {
ast::SelfStatic => { return Ok(false); }
name: ast::Ident,
generics: &ast::Generics,
opt_explicit_self: Option<&ast::ExplicitSelf_>,
- vis: ast::Visibility) -> IoResult<()> {
+ vis: ast::Visibility) -> io::Result<()> {
try!(self.head(""));
try!(self.print_fn_header_info(unsafety, abi, vis));
try!(self.nbsp());
pub fn print_fn_args(&mut self, decl: &ast::FnDecl,
opt_explicit_self: Option<&ast::ExplicitSelf_>)
- -> IoResult<()> {
+ -> io::Result<()> {
// It is unfortunate to duplicate the commasep logic, but we want the
// self type and the args all in the same box.
try!(self.rbox(0, Inconsistent));
pub fn print_fn_args_and_ret(&mut self, decl: &ast::FnDecl,
opt_explicit_self: Option<&ast::ExplicitSelf_>)
- -> IoResult<()> {
+ -> io::Result<()> {
try!(self.popen());
try!(self.print_fn_args(decl, opt_explicit_self));
if decl.variadic {
pub fn print_fn_block_args(
&mut self,
decl: &ast::FnDecl)
- -> IoResult<()> {
+ -> io::Result<()> {
try!(word(&mut self.s, "|"));
try!(self.print_fn_args(decl, None));
try!(word(&mut self.s, "|"));
}
pub fn print_capture_clause(&mut self, capture_clause: ast::CaptureClause)
- -> IoResult<()> {
+ -> io::Result<()> {
match capture_clause {
ast::CaptureByValue => self.word_space("move"),
ast::CaptureByRef => Ok(()),
pub fn print_bounds(&mut self,
prefix: &str,
bounds: &[ast::TyParamBound])
- -> IoResult<()> {
+ -> io::Result<()> {
if !bounds.is_empty() {
try!(word(&mut self.s, prefix));
let mut first = true;
pub fn print_lifetime(&mut self,
lifetime: &ast::Lifetime)
- -> IoResult<()>
+ -> io::Result<()>
{
self.print_name(lifetime.name)
}
pub fn print_lifetime_def(&mut self,
lifetime: &ast::LifetimeDef)
- -> IoResult<()>
+ -> io::Result<()>
{
try!(self.print_lifetime(&lifetime.lifetime));
let mut sep = ":";
pub fn print_generics(&mut self,
generics: &ast::Generics)
- -> IoResult<()>
+ -> io::Result<()>
{
let total = generics.lifetimes.len() + generics.ty_params.len();
if total == 0 {
Ok(())
}
- pub fn print_ty_param(&mut self, param: &ast::TyParam) -> IoResult<()> {
+ pub fn print_ty_param(&mut self, param: &ast::TyParam) -> io::Result<()> {
try!(self.print_ident(param.ident));
try!(self.print_bounds(":", ¶m.bounds));
match param.default {
}
pub fn print_where_clause(&mut self, generics: &ast::Generics)
- -> IoResult<()> {
+ -> io::Result<()> {
if generics.where_clause.predicates.len() == 0 {
return Ok(())
}
Ok(())
}
- pub fn print_meta_item(&mut self, item: &ast::MetaItem) -> IoResult<()> {
+ pub fn print_meta_item(&mut self, item: &ast::MetaItem) -> io::Result<()> {
try!(self.ibox(indent_unit));
match item.node {
ast::MetaWord(ref name) => {
self.end()
}
- pub fn print_view_path(&mut self, vp: &ast::ViewPath) -> IoResult<()> {
+ pub fn print_view_path(&mut self, vp: &ast::ViewPath) -> io::Result<()> {
match vp.node {
ast::ViewPathSimple(ident, ref path) => {
try!(self.print_path(path, false, 0));
}
pub fn print_mutability(&mut self,
- mutbl: ast::Mutability) -> IoResult<()> {
+ mutbl: ast::Mutability) -> io::Result<()> {
match mutbl {
ast::MutMutable => self.word_nbsp("mut"),
ast::MutImmutable => Ok(()),
}
}
- pub fn print_mt(&mut self, mt: &ast::MutTy) -> IoResult<()> {
+ pub fn print_mt(&mut self, mt: &ast::MutTy) -> io::Result<()> {
try!(self.print_mutability(mt.mutbl));
self.print_type(&*mt.ty)
}
- pub fn print_arg(&mut self, input: &ast::Arg) -> IoResult<()> {
+ pub fn print_arg(&mut self, input: &ast::Arg) -> io::Result<()> {
try!(self.ibox(indent_unit));
match input.ty.node {
ast::TyInfer => try!(self.print_pat(&*input.pat)),
self.end()
}
- pub fn print_fn_output(&mut self, decl: &ast::FnDecl) -> IoResult<()> {
+ pub fn print_fn_output(&mut self, decl: &ast::FnDecl) -> io::Result<()> {
if let ast::DefaultReturn(..) = decl.output {
return Ok(());
}
id: Option<ast::Ident>,
generics: &ast::Generics,
opt_explicit_self: Option<&ast::ExplicitSelf_>)
- -> IoResult<()> {
+ -> io::Result<()> {
try!(self.ibox(indent_unit));
try!(self.print_fn_header_info(Some(unsafety), abi, ast::Inherited));
pub fn maybe_print_trailing_comment(&mut self, span: codemap::Span,
next_pos: Option<BytePos>)
- -> IoResult<()> {
+ -> io::Result<()> {
let cm = match self.cm {
Some(cm) => cm,
_ => return Ok(())
Ok(())
}
- pub fn print_remaining_comments(&mut self) -> IoResult<()> {
+ pub fn print_remaining_comments(&mut self) -> io::Result<()> {
// If there aren't any remaining comments, then we need to manually
// make sure there is a line break at the end.
if self.next_comment().is_none() {
Ok(())
}
- pub fn print_literal(&mut self, lit: &ast::Lit) -> IoResult<()> {
+ pub fn print_literal(&mut self, lit: &ast::Lit) -> io::Result<()> {
try!(self.maybe_print_comment(lit.span.lo));
match self.next_lit(lit.span.lo) {
Some(ref ltrl) => {
}
}
- pub fn maybe_print_comment(&mut self, pos: BytePos) -> IoResult<()> {
+ pub fn maybe_print_comment(&mut self, pos: BytePos) -> io::Result<()> {
loop {
match self.next_comment() {
Some(ref cmnt) => {
}
pub fn print_comment(&mut self,
- cmnt: &comments::Comment) -> IoResult<()> {
+ cmnt: &comments::Comment) -> io::Result<()> {
match cmnt.style {
comments::Mixed => {
assert_eq!(cmnt.lines.len(), 1);
}
pub fn print_string(&mut self, st: &str,
- style: ast::StrStyle) -> IoResult<()> {
+ style: ast::StrStyle) -> io::Result<()> {
let st = match style {
ast::CookedStr => {
(format!("\"{}\"", st.escape_default()))
}
pub fn print_opt_unsafety(&mut self,
- opt_unsafety: Option<ast::Unsafety>) -> IoResult<()> {
+ opt_unsafety: Option<ast::Unsafety>) -> io::Result<()> {
match opt_unsafety {
Some(unsafety) => self.print_unsafety(unsafety),
None => Ok(())
pub fn print_opt_abi_and_extern_if_nondefault(&mut self,
opt_abi: Option<abi::Abi>)
- -> IoResult<()> {
+ -> io::Result<()> {
match opt_abi {
Some(abi::Rust) => Ok(()),
Some(abi) => {
}
pub fn print_extern_opt_abi(&mut self,
- opt_abi: Option<abi::Abi>) -> IoResult<()> {
+ opt_abi: Option<abi::Abi>) -> io::Result<()> {
match opt_abi {
Some(abi) => {
try!(self.word_nbsp("extern"));
pub fn print_fn_header_info(&mut self,
opt_unsafety: Option<ast::Unsafety>,
abi: abi::Abi,
- vis: ast::Visibility) -> IoResult<()> {
+ vis: ast::Visibility) -> io::Result<()> {
try!(word(&mut self.s, &visibility_qualified(vis, "")));
try!(self.print_opt_unsafety(opt_unsafety));
word(&mut self.s, "fn")
}
- pub fn print_unsafety(&mut self, s: ast::Unsafety) -> IoResult<()> {
+ pub fn print_unsafety(&mut self, s: ast::Unsafety) -> io::Result<()> {
match s {
ast::Unsafety::Normal => Ok(()),
ast::Unsafety::Unsafe => self.word_nbsp("unsafe"),
callee: NameAndSpan {
name: "test".to_string(),
format: MacroAttribute,
- span: None
+ span: None,
+ allow_internal_unstable: false,
}
});
callee: NameAndSpan {
name: "test".to_string(),
format: MacroAttribute,
- span: None
+ span: None,
+ allow_internal_unstable: true,
}
};
let expn_id = cx.sess.span_diagnostic.cm.record_expansion(info);
#![feature(box_syntax)]
#![feature(collections)]
+#![feature(fs)]
#![feature(int_uint)]
+#![feature(io)]
#![feature(old_io)]
-#![feature(old_path)]
+#![feature(path)]
#![feature(rustc_private)]
#![feature(staged_api)]
-#![feature(unicode)]
#![feature(std_misc)]
-#![feature(os)]
+#![feature(unicode)]
#![cfg_attr(windows, feature(libc))]
#[macro_use] extern crate log;
/// Number for a terminal color
pub type Color = u16;
- pub const BLACK: Color = 0u16;
- pub const RED: Color = 1u16;
- pub const GREEN: Color = 2u16;
- pub const YELLOW: Color = 3u16;
- pub const BLUE: Color = 4u16;
- pub const MAGENTA: Color = 5u16;
- pub const CYAN: Color = 6u16;
- pub const WHITE: Color = 7u16;
-
- pub const BRIGHT_BLACK: Color = 8u16;
- pub const BRIGHT_RED: Color = 9u16;
- pub const BRIGHT_GREEN: Color = 10u16;
- pub const BRIGHT_YELLOW: Color = 11u16;
- pub const BRIGHT_BLUE: Color = 12u16;
- pub const BRIGHT_MAGENTA: Color = 13u16;
- pub const BRIGHT_CYAN: Color = 14u16;
- pub const BRIGHT_WHITE: Color = 15u16;
+ pub const BLACK: Color = 0;
+ pub const RED: Color = 1;
+ pub const GREEN: Color = 2;
+ pub const YELLOW: Color = 3;
+ pub const BLUE: Color = 4;
+ pub const MAGENTA: Color = 5;
+ pub const CYAN: Color = 6;
+ pub const WHITE: Color = 7;
+
+ pub const BRIGHT_BLACK: Color = 8;
+ pub const BRIGHT_RED: Color = 9;
+ pub const BRIGHT_GREEN: Color = 10;
+ pub const BRIGHT_YELLOW: Color = 11;
+ pub const BRIGHT_BLUE: Color = 12;
+ pub const BRIGHT_MAGENTA: Color = 13;
+ pub const BRIGHT_CYAN: Color = 14;
+ pub const BRIGHT_WHITE: Color = 15;
}
/// Terminal attributes
// if c is 0, use 0200 (128) for ncurses compatibility
Number(c) => {
output.push(if c == 0 {
- 128u8
+ 128
} else {
c as u8
})
#[test]
fn test_comparison_ops() {
- let v = [('<', [1u8, 0u8, 0u8]), ('=', [0u8, 1u8, 0u8]), ('>', [0u8, 0u8, 1u8])];
+ let v = [('<', [1, 0, 0]), ('=', [0, 1, 0]), ('>', [0, 0, 1])];
for &(op, bs) in &v {
let s = format!("%{{1}}%{{2}}%{}%d", op);
let res = expand(s.as_bytes(), &[], &mut Variables::new());
//! ncurses-compatible compiled terminfo format parsing (term(5))
use std::collections::HashMap;
-use std::old_io;
+use std::io::prelude::*;
+use std::io;
use super::super::TermInfo;
// These are the orders ncurses uses in its compiled format (as of 5.9). Not sure if portable.
"box1"];
/// Parse a compiled terminfo entry, using long capability names if `longnames` is true
-pub fn parse(file: &mut old_io::Reader, longnames: bool)
+pub fn parse(file: &mut Read, longnames: bool)
-> Result<Box<TermInfo>, String> {
macro_rules! try { ($e:expr) => (
match $e {
}
// Check magic number
- let magic = try!(file.read_le_u16());
+ let magic = try!(read_le_u16(file));
if magic != 0x011A {
return Err(format!("invalid magic number: expected {:x}, found {:x}",
0x011A as usize, magic as usize));
}
- let names_bytes = try!(file.read_le_i16()) as int;
- let bools_bytes = try!(file.read_le_i16()) as int;
- let numbers_count = try!(file.read_le_i16()) as int;
- let string_offsets_count = try!(file.read_le_i16()) as int;
- let string_table_bytes = try!(file.read_le_i16()) as int;
+ let names_bytes = try!(read_le_u16(file)) as int;
+ let bools_bytes = try!(read_le_u16(file)) as int;
+ let numbers_count = try!(read_le_u16(file)) as int;
+ let string_offsets_count = try!(read_le_u16(file)) as int;
+ let string_table_bytes = try!(read_le_u16(file)) as int;
assert!(names_bytes > 0);
}
// don't read NUL
- let bytes = try!(file.read_exact(names_bytes as uint - 1));
+ let bytes = try!(read_exact(file, names_bytes as uint - 1));
let names_str = match String::from_utf8(bytes) {
Ok(s) => s,
Err(_) => return Err("input not utf-8".to_string()),
.map(|s| s.to_string())
.collect();
- try!(file.read_byte()); // consume NUL
+ try!(read_byte(file)); // consume NUL
let mut bools_map = HashMap::new();
if bools_bytes != 0 {
for i in 0..bools_bytes {
- let b = try!(file.read_byte());
+ let b = try!(read_byte(file));
if b == 1 {
bools_map.insert(bnames[i as uint].to_string(), true);
}
}
if (bools_bytes + names_bytes) % 2 == 1 {
- try!(file.read_byte()); // compensate for padding
+ try!(read_byte(file)); // compensate for padding
}
let mut numbers_map = HashMap::new();
if numbers_count != 0 {
for i in 0..numbers_count {
- let n = try!(file.read_le_u16());
+ let n = try!(read_le_u16(file));
if n != 0xFFFF {
numbers_map.insert(nnames[i as uint].to_string(), n);
}
if string_offsets_count != 0 {
let mut string_offsets = Vec::with_capacity(10);
for _ in 0..string_offsets_count {
- string_offsets.push(try!(file.read_le_u16()));
+ string_offsets.push(try!(read_le_u16(file)));
}
- let string_table = try!(file.read_exact(string_table_bytes as uint));
+ let string_table = try!(read_exact(file, string_table_bytes as usize));
if string_table.len() != string_table_bytes as uint {
return Err("error: hit EOF before end of string \
})
}
+/// Read a little-endian `u16` from `r`.
+///
+/// A single `Read::read` call may legitimately return fewer bytes than
+/// the buffer holds (short read), so loop until both bytes arrive or
+/// EOF is hit. A truncated input still panics via the assert, matching
+/// the behaviour of the sibling fixed-size readers in this module.
+fn read_le_u16<R: Read + ?Sized>(r: &mut R) -> io::Result<u16> {
+    let mut b = [0; 2];
+    let mut amt = 0;
+    while amt < b.len() {
+        match try!(r.read(&mut b[amt..])) {
+            0 => break, // EOF before the value was complete
+            n => amt += n,
+        }
+    }
+    assert_eq!(amt, 2);
+    Ok((b[0] as u16) | ((b[1] as u16) << 8))
+}
+
+/// Read a single byte from `r`.
+///
+/// For a 1-byte buffer `Read::read` returns `Ok(0)` only at EOF, so a
+/// short read is impossible here; the assert turns a truncated input
+/// into a panic rather than an `Err`, consistent with the other
+/// fixed-size readers in this module.
+fn read_byte<R: Read + ?Sized>(r: &mut R) -> io::Result<u8> {
+    let mut b = [0; 1];
+    assert_eq!(try!(r.read(&mut b)), 1);
+    Ok(b[0])
+}
+
+/// Read exactly `sz` bytes from `r` into a freshly allocated `Vec`.
+///
+/// `take(sz).read_to_end` keeps reading until `sz` bytes or EOF, so
+/// short reads from the underlying reader are handled correctly; a
+/// truncated input (EOF before `sz` bytes) trips the assert and
+/// panics, matching the sibling helpers above.
+fn read_exact<R: Read + ?Sized>(r: &mut R, sz: usize) -> io::Result<Vec<u8>> {
+    let mut v = Vec::with_capacity(sz);
+    try!(r.take(sz as u64).read_to_end(&mut v));
+    assert_eq!(v.len(), sz);
+    Ok(v)
+}
+
/// Create a dummy TermInfo struct for msys terminals
pub fn msys_terminfo() -> Box<TermInfo> {
let mut strings = HashMap::new();
//!
//! Does not support hashed database, only filesystem!
-use std::old_io::File;
-use std::old_io::fs::PathExtensions;
use std::env;
+use std::fs::File;
+use std::io::prelude::*;
+use std::path::PathBuf;
/// Return path to database entry for `term`
#[allow(deprecated)]
-pub fn get_dbpath_for_term(term: &str) -> Option<Box<Path>> {
+pub fn get_dbpath_for_term(term: &str) -> Option<Box<PathBuf>> {
if term.len() == 0 {
return None;
}
- let homedir = ::std::os::homedir();
+ let homedir = env::home_dir();
let mut dirs_to_search = Vec::new();
let first_char = term.char_at(0);
// Find search directory
- match env::var("TERMINFO") {
- Ok(dir) => dirs_to_search.push(Path::new(dir)),
- Err(..) => {
+ match env::var_os("TERMINFO") {
+ Some(dir) => dirs_to_search.push(PathBuf::new(&dir)),
+ None => {
if homedir.is_some() {
// ncurses compatibility;
dirs_to_search.push(homedir.unwrap().join(".terminfo"))
match env::var("TERMINFO_DIRS") {
Ok(dirs) => for i in dirs.split(':') {
if i == "" {
- dirs_to_search.push(Path::new("/usr/share/terminfo"));
+ dirs_to_search.push(PathBuf::new("/usr/share/terminfo"));
} else {
- dirs_to_search.push(Path::new(i));
+ dirs_to_search.push(PathBuf::new(i));
}
},
// Found nothing in TERMINFO_DIRS, use the default paths:
// ~/.terminfo, ncurses will search /etc/terminfo, then
// /lib/terminfo, and eventually /usr/share/terminfo.
Err(..) => {
- dirs_to_search.push(Path::new("/etc/terminfo"));
- dirs_to_search.push(Path::new("/lib/terminfo"));
- dirs_to_search.push(Path::new("/usr/share/terminfo"));
+ dirs_to_search.push(PathBuf::new("/etc/terminfo"));
+ dirs_to_search.push(PathBuf::new("/lib/terminfo"));
+ dirs_to_search.push(PathBuf::new("/usr/share/terminfo"));
}
}
}
for p in &dirs_to_search {
if p.exists() {
let f = first_char.to_string();
- let newp = p.join_many(&[&f[..], term]);
+ let newp = p.join(&f).join(term);
if newp.exists() {
return Some(box newp);
}
// on some installations the dir is named after the hex of the char (e.g. OS X)
let f = format!("{:x}", first_char as uint);
- let newp = p.join_many(&[&f[..], term]);
+ let newp = p.join(&f).join(term);
if newp.exists() {
return Some(box newp);
}
fn test_get_dbpath_for_term() {
// woefully inadequate test coverage
// note: current tests won't work with non-standard terminfo hierarchies (e.g. OS X's)
- use std::os::{setenv, unsetenv};
+ use std::env;
// FIXME (#9639): This needs to handle non-utf8 paths
fn x(t: &str) -> String {
let p = get_dbpath_for_term(t).expect("no terminfo entry found");
- p.as_str().unwrap().to_string()
+ p.to_str().unwrap().to_string()
};
assert!(x("screen") == "/usr/share/terminfo/s/screen");
assert!(get_dbpath_for_term("") == None);
- setenv("TERMINFO_DIRS", ":");
+ env::set_var("TERMINFO_DIRS", ":");
assert!(x("screen") == "/usr/share/terminfo/s/screen");
- unsetenv("TERMINFO_DIRS");
+ env::remove_var("TERMINFO_DIRS");
}
#[test]
#![feature(core)]
#![feature(int_uint)]
#![feature(old_io)]
-#![feature(old_path)]
+#![feature(path)]
+#![feature(fs)]
#![feature(rustc_private)]
#![feature(staged_api)]
#![feature(std_misc)]
+#![feature(io)]
extern crate getopts;
extern crate serialize;
use std::any::Any;
use std::cmp;
use std::collections::BTreeMap;
+use std::env;
use std::fmt;
-use std::old_io::stdio::StdWriter;
-use std::old_io::{File, ChanReader, ChanWriter};
-use std::old_io;
+use std::fs::File;
+use std::io::{self, Write};
use std::iter::repeat;
use std::num::{Float, Int};
-use std::env;
+use std::old_io::stdio::StdWriter;
+use std::old_io::{ChanReader, ChanWriter};
+use std::old_io;
+use std::path::{PathBuf};
use std::sync::mpsc::{channel, Sender};
use std::thread;
use std::thunk::{Thunk, Invoke};
pub run_ignored: bool,
pub run_tests: bool,
pub run_benchmarks: bool,
- pub logfile: Option<Path>,
+ pub logfile: Option<PathBuf>,
pub nocapture: bool,
pub color: ColorConfig,
}
let run_ignored = matches.opt_present("ignored");
let logfile = matches.opt_str("logfile");
- let logfile = logfile.map(|s| Path::new(s));
+ let logfile = logfile.map(|s| PathBuf::new(&s));
let run_benchmarks = matches.opt_present("bench");
let run_tests = ! run_benchmarks ||
max_name_len: uint, // number of columns to fill when aligning names
}
+/// Shim converting a new-style `std::io::Error` into the deprecated
+/// `old_io::IoError`, so code still on the old I/O API (the test
+/// console output below) can propagate errors from new `std::fs` /
+/// `std::io` calls. The mapping is lossy: every error kind collapses
+/// to `OtherIoError`, with the formatted message kept in `detail`.
+fn new2old(new: io::Error) -> old_io::IoError {
+    old_io::IoError {
+        kind: old_io::OtherIoError,
+        desc: "other error",
+        detail: Some(new.to_string()),
+    }
+}
+
impl<T: Writer> ConsoleTestState<T> {
pub fn new(opts: &TestOpts,
_: Option<T>) -> old_io::IoResult<ConsoleTestState<StdWriter>> {
let log_out = match opts.logfile {
- Some(ref path) => Some(try!(File::create(path))),
+ Some(ref path) => Some(try!(File::create(path).map_err(new2old))),
None => None
};
let out = match term::stdout() {
}
pub fn write_log(&mut self, test: &TestDesc,
- result: &TestResult) -> old_io::IoResult<()> {
+ result: &TestResult) -> io::Result<()> {
match self.log_out {
None => Ok(()),
Some(ref mut o) => {
TeFiltered(ref filtered_tests) => st.write_run_start(filtered_tests.len()),
TeWait(ref test, padding) => st.write_test_start(test, padding),
TeResult(test, result, stdout) => {
- try!(st.write_log(&test, &result));
+ try!(st.write_log(&test, &result).map_err(new2old));
try!(st.write_result(&result));
match result {
TrOk => st.passed += 1,
Pretty(_) => unreachable!()
};
- let apos = s.find_str("a").unwrap();
- let bpos = s.find_str("b").unwrap();
+ let apos = s.find("a").unwrap();
+ let bpos = s.find("b").unwrap();
assert!(apos < bpos);
}
pub fn iter<T, F>(&mut self, mut inner: F) where F: FnMut() -> T {
self.dur = Duration::span(|| {
let k = self.iterations;
- for _ in 0u64..k {
+ for _ in 0..k {
black_box(inner());
}
});
// This is a more statistics-driven benchmark algorithm
pub fn auto_bench<F>(&mut self, mut f: F) -> stats::Summary<f64> where F: FnMut(&mut Bencher) {
// Initial bench run to get ballpark figure.
- let mut n = 1_u64;
+ let mut n = 1;
self.bench_n(n, |x| f(x));
// Try to estimate iter count for 1ms falling back to 1m
'\u{2eff}'), ('\u{2fd6}', '\u{2fef}'), ('\u{2ffc}', '\u{2fff}'), ('\u{3040}', '\u{3040}'),
('\u{3097}', '\u{3098}'), ('\u{3100}', '\u{3104}'), ('\u{312e}', '\u{3130}'), ('\u{318f}',
'\u{318f}'), ('\u{31bb}', '\u{31bf}'), ('\u{31e4}', '\u{31ef}'), ('\u{321f}', '\u{321f}'),
- ('\u{32ff}', '\u{32ff}'), ('\u{3401}', '\u{4db4}'), ('\u{4db6}', '\u{4dbf}'), ('\u{4e01}',
- '\u{9fcb}'), ('\u{9fcd}', '\u{9fff}'), ('\u{a48d}', '\u{a48f}'), ('\u{a4c7}', '\u{a4cf}'),
- ('\u{a62c}', '\u{a63f}'), ('\u{a69e}', '\u{a69e}'), ('\u{a6f8}', '\u{a6ff}'), ('\u{a78f}',
- '\u{a78f}'), ('\u{a7ae}', '\u{a7af}'), ('\u{a7b2}', '\u{a7f6}'), ('\u{a82c}', '\u{a82f}'),
- ('\u{a83a}', '\u{a83f}'), ('\u{a878}', '\u{a87f}'), ('\u{a8c5}', '\u{a8cd}'), ('\u{a8da}',
- '\u{a8df}'), ('\u{a8fc}', '\u{a8ff}'), ('\u{a954}', '\u{a95e}'), ('\u{a97d}', '\u{a97f}'),
- ('\u{a9ce}', '\u{a9ce}'), ('\u{a9da}', '\u{a9dd}'), ('\u{a9ff}', '\u{a9ff}'), ('\u{aa37}',
- '\u{aa3f}'), ('\u{aa4e}', '\u{aa4f}'), ('\u{aa5a}', '\u{aa5b}'), ('\u{aac3}', '\u{aada}'),
- ('\u{aaf7}', '\u{ab00}'), ('\u{ab07}', '\u{ab08}'), ('\u{ab0f}', '\u{ab10}'), ('\u{ab17}',
- '\u{ab1f}'), ('\u{ab27}', '\u{ab27}'), ('\u{ab2f}', '\u{ab2f}'), ('\u{ab60}', '\u{ab63}'),
- ('\u{ab66}', '\u{abbf}'), ('\u{abee}', '\u{abef}'), ('\u{abfa}', '\u{abff}'), ('\u{ac01}',
- '\u{d7a2}'), ('\u{d7a4}', '\u{d7af}'), ('\u{d7c7}', '\u{d7ca}'), ('\u{d7fc}', '\u{d7ff}'),
- ('\u{e000}', '\u{f8ff}'), ('\u{fa6e}', '\u{fa6f}'), ('\u{fada}', '\u{faff}'), ('\u{fb07}',
- '\u{fb12}'), ('\u{fb18}', '\u{fb1c}'), ('\u{fb37}', '\u{fb37}'), ('\u{fb3d}', '\u{fb3d}'),
- ('\u{fb3f}', '\u{fb3f}'), ('\u{fb42}', '\u{fb42}'), ('\u{fb45}', '\u{fb45}'), ('\u{fbc2}',
- '\u{fbd2}'), ('\u{fd40}', '\u{fd4f}'), ('\u{fd90}', '\u{fd91}'), ('\u{fdc8}', '\u{fdef}'),
- ('\u{fdfe}', '\u{fdff}'), ('\u{fe1a}', '\u{fe1f}'), ('\u{fe2e}', '\u{fe2f}'), ('\u{fe53}',
- '\u{fe53}'), ('\u{fe67}', '\u{fe67}'), ('\u{fe6c}', '\u{fe6f}'), ('\u{fe75}', '\u{fe75}'),
- ('\u{fefd}', '\u{ff00}'), ('\u{ffbf}', '\u{ffc1}'), ('\u{ffc8}', '\u{ffc9}'), ('\u{ffd0}',
- '\u{ffd1}'), ('\u{ffd8}', '\u{ffd9}'), ('\u{ffdd}', '\u{ffdf}'), ('\u{ffe7}', '\u{ffe7}'),
- ('\u{ffef}', '\u{fffb}'), ('\u{fffe}', '\u{ffff}'), ('\u{1000c}', '\u{1000c}'),
+ ('\u{32ff}', '\u{32ff}'), ('\u{4db6}', '\u{4dbf}'), ('\u{9fcd}', '\u{9fff}'), ('\u{a48d}',
+ '\u{a48f}'), ('\u{a4c7}', '\u{a4cf}'), ('\u{a62c}', '\u{a63f}'), ('\u{a69e}', '\u{a69e}'),
+ ('\u{a6f8}', '\u{a6ff}'), ('\u{a78f}', '\u{a78f}'), ('\u{a7ae}', '\u{a7af}'), ('\u{a7b2}',
+ '\u{a7f6}'), ('\u{a82c}', '\u{a82f}'), ('\u{a83a}', '\u{a83f}'), ('\u{a878}', '\u{a87f}'),
+ ('\u{a8c5}', '\u{a8cd}'), ('\u{a8da}', '\u{a8df}'), ('\u{a8fc}', '\u{a8ff}'), ('\u{a954}',
+ '\u{a95e}'), ('\u{a97d}', '\u{a97f}'), ('\u{a9ce}', '\u{a9ce}'), ('\u{a9da}', '\u{a9dd}'),
+ ('\u{a9ff}', '\u{a9ff}'), ('\u{aa37}', '\u{aa3f}'), ('\u{aa4e}', '\u{aa4f}'), ('\u{aa5a}',
+ '\u{aa5b}'), ('\u{aac3}', '\u{aada}'), ('\u{aaf7}', '\u{ab00}'), ('\u{ab07}', '\u{ab08}'),
+ ('\u{ab0f}', '\u{ab10}'), ('\u{ab17}', '\u{ab1f}'), ('\u{ab27}', '\u{ab27}'), ('\u{ab2f}',
+ '\u{ab2f}'), ('\u{ab60}', '\u{ab63}'), ('\u{ab66}', '\u{abbf}'), ('\u{abee}', '\u{abef}'),
+ ('\u{abfa}', '\u{abff}'), ('\u{d7a4}', '\u{d7af}'), ('\u{d7c7}', '\u{d7ca}'), ('\u{d7fc}',
+ '\u{d7ff}'), ('\u{e000}', '\u{f8ff}'), ('\u{fa6e}', '\u{fa6f}'), ('\u{fada}', '\u{faff}'),
+ ('\u{fb07}', '\u{fb12}'), ('\u{fb18}', '\u{fb1c}'), ('\u{fb37}', '\u{fb37}'), ('\u{fb3d}',
+ '\u{fb3d}'), ('\u{fb3f}', '\u{fb3f}'), ('\u{fb42}', '\u{fb42}'), ('\u{fb45}', '\u{fb45}'),
+ ('\u{fbc2}', '\u{fbd2}'), ('\u{fd40}', '\u{fd4f}'), ('\u{fd90}', '\u{fd91}'), ('\u{fdc8}',
+ '\u{fdef}'), ('\u{fdfe}', '\u{fdff}'), ('\u{fe1a}', '\u{fe1f}'), ('\u{fe2e}', '\u{fe2f}'),
+ ('\u{fe53}', '\u{fe53}'), ('\u{fe67}', '\u{fe67}'), ('\u{fe6c}', '\u{fe6f}'), ('\u{fe75}',
+ '\u{fe75}'), ('\u{fefd}', '\u{ff00}'), ('\u{ffbf}', '\u{ffc1}'), ('\u{ffc8}', '\u{ffc9}'),
+ ('\u{ffd0}', '\u{ffd1}'), ('\u{ffd8}', '\u{ffd9}'), ('\u{ffdd}', '\u{ffdf}'), ('\u{ffe7}',
+ '\u{ffe7}'), ('\u{ffef}', '\u{fffb}'), ('\u{fffe}', '\u{ffff}'), ('\u{1000c}', '\u{1000c}'),
('\u{10027}', '\u{10027}'), ('\u{1003b}', '\u{1003b}'), ('\u{1003e}', '\u{1003e}'),
('\u{1004e}', '\u{1004f}'), ('\u{1005e}', '\u{1007f}'), ('\u{100fb}', '\u{100ff}'),
('\u{10103}', '\u{10106}'), ('\u{10134}', '\u{10136}'), ('\u{1018d}', '\u{1018f}'),
('\u{1f643}', '\u{1f644}'), ('\u{1f6d0}', '\u{1f6df}'), ('\u{1f6ed}', '\u{1f6ef}'),
('\u{1f6f4}', '\u{1f6ff}'), ('\u{1f774}', '\u{1f77f}'), ('\u{1f7d5}', '\u{1f7ff}'),
('\u{1f80c}', '\u{1f80f}'), ('\u{1f848}', '\u{1f84f}'), ('\u{1f85a}', '\u{1f85f}'),
- ('\u{1f888}', '\u{1f88f}'), ('\u{1f8ae}', '\u{1ffff}'), ('\u{20001}', '\u{2a6d5}'),
- ('\u{2a6d7}', '\u{2a6ff}'), ('\u{2a701}', '\u{2b733}'), ('\u{2b735}', '\u{2b73f}'),
- ('\u{2b741}', '\u{2b81c}'), ('\u{2b81e}', '\u{2f7ff}'), ('\u{2fa1e}', '\u{e00ff}'),
+ ('\u{1f888}', '\u{1f88f}'), ('\u{1f8ae}', '\u{1ffff}'), ('\u{2a6d7}', '\u{2a6ff}'),
+ ('\u{2b735}', '\u{2b73f}'), ('\u{2b81e}', '\u{2f7ff}'), ('\u{2fa1e}', '\u{e00ff}'),
('\u{e01f0}', '\u{10ffff}')
];
('\u{2e9a}', '\u{2e9a}'), ('\u{2ef4}', '\u{2eff}'), ('\u{2fd6}', '\u{2fef}'), ('\u{2ffc}',
'\u{2fff}'), ('\u{3040}', '\u{3040}'), ('\u{3097}', '\u{3098}'), ('\u{3100}', '\u{3104}'),
('\u{312e}', '\u{3130}'), ('\u{318f}', '\u{318f}'), ('\u{31bb}', '\u{31bf}'), ('\u{31e4}',
- '\u{31ef}'), ('\u{321f}', '\u{321f}'), ('\u{32ff}', '\u{32ff}'), ('\u{3401}', '\u{4db4}'),
- ('\u{4db6}', '\u{4dbf}'), ('\u{4e01}', '\u{9fcb}'), ('\u{9fcd}', '\u{9fff}'), ('\u{a48d}',
- '\u{a48f}'), ('\u{a4c7}', '\u{a4cf}'), ('\u{a62c}', '\u{a63f}'), ('\u{a69e}', '\u{a69e}'),
- ('\u{a6f8}', '\u{a6ff}'), ('\u{a78f}', '\u{a78f}'), ('\u{a7ae}', '\u{a7af}'), ('\u{a7b2}',
- '\u{a7f6}'), ('\u{a82c}', '\u{a82f}'), ('\u{a83a}', '\u{a83f}'), ('\u{a878}', '\u{a87f}'),
- ('\u{a8c5}', '\u{a8cd}'), ('\u{a8da}', '\u{a8df}'), ('\u{a8fc}', '\u{a8ff}'), ('\u{a954}',
- '\u{a95e}'), ('\u{a97d}', '\u{a97f}'), ('\u{a9ce}', '\u{a9ce}'), ('\u{a9da}', '\u{a9dd}'),
- ('\u{a9ff}', '\u{a9ff}'), ('\u{aa37}', '\u{aa3f}'), ('\u{aa4e}', '\u{aa4f}'), ('\u{aa5a}',
- '\u{aa5b}'), ('\u{aac3}', '\u{aada}'), ('\u{aaf7}', '\u{ab00}'), ('\u{ab07}', '\u{ab08}'),
- ('\u{ab0f}', '\u{ab10}'), ('\u{ab17}', '\u{ab1f}'), ('\u{ab27}', '\u{ab27}'), ('\u{ab2f}',
- '\u{ab2f}'), ('\u{ab60}', '\u{ab63}'), ('\u{ab66}', '\u{abbf}'), ('\u{abee}', '\u{abef}'),
- ('\u{abfa}', '\u{abff}'), ('\u{ac01}', '\u{d7a2}'), ('\u{d7a4}', '\u{d7af}'), ('\u{d7c7}',
- '\u{d7ca}'), ('\u{d7fc}', '\u{d7ff}'), ('\u{e001}', '\u{f8fe}'), ('\u{fa6e}', '\u{fa6f}'),
- ('\u{fada}', '\u{faff}'), ('\u{fb07}', '\u{fb12}'), ('\u{fb18}', '\u{fb1c}'), ('\u{fb37}',
- '\u{fb37}'), ('\u{fb3d}', '\u{fb3d}'), ('\u{fb3f}', '\u{fb3f}'), ('\u{fb42}', '\u{fb42}'),
- ('\u{fb45}', '\u{fb45}'), ('\u{fbc2}', '\u{fbd2}'), ('\u{fd40}', '\u{fd4f}'), ('\u{fd90}',
- '\u{fd91}'), ('\u{fdc8}', '\u{fdef}'), ('\u{fdfe}', '\u{fdff}'), ('\u{fe1a}', '\u{fe1f}'),
- ('\u{fe2e}', '\u{fe2f}'), ('\u{fe53}', '\u{fe53}'), ('\u{fe67}', '\u{fe67}'), ('\u{fe6c}',
- '\u{fe6f}'), ('\u{fe75}', '\u{fe75}'), ('\u{fefd}', '\u{fefe}'), ('\u{ff00}', '\u{ff00}'),
- ('\u{ffbf}', '\u{ffc1}'), ('\u{ffc8}', '\u{ffc9}'), ('\u{ffd0}', '\u{ffd1}'), ('\u{ffd8}',
- '\u{ffd9}'), ('\u{ffdd}', '\u{ffdf}'), ('\u{ffe7}', '\u{ffe7}'), ('\u{ffef}', '\u{fff8}'),
- ('\u{fffe}', '\u{ffff}'), ('\u{1000c}', '\u{1000c}'), ('\u{10027}', '\u{10027}'),
- ('\u{1003b}', '\u{1003b}'), ('\u{1003e}', '\u{1003e}'), ('\u{1004e}', '\u{1004f}'),
- ('\u{1005e}', '\u{1007f}'), ('\u{100fb}', '\u{100ff}'), ('\u{10103}', '\u{10106}'),
- ('\u{10134}', '\u{10136}'), ('\u{1018d}', '\u{1018f}'), ('\u{1019c}', '\u{1019f}'),
- ('\u{101a1}', '\u{101cf}'), ('\u{101fe}', '\u{1027f}'), ('\u{1029d}', '\u{1029f}'),
- ('\u{102d1}', '\u{102df}'), ('\u{102fc}', '\u{102ff}'), ('\u{10324}', '\u{1032f}'),
- ('\u{1034b}', '\u{1034f}'), ('\u{1037b}', '\u{1037f}'), ('\u{1039e}', '\u{1039e}'),
- ('\u{103c4}', '\u{103c7}'), ('\u{103d6}', '\u{103ff}'), ('\u{1049e}', '\u{1049f}'),
- ('\u{104aa}', '\u{104ff}'), ('\u{10528}', '\u{1052f}'), ('\u{10564}', '\u{1056e}'),
- ('\u{10570}', '\u{105ff}'), ('\u{10737}', '\u{1073f}'), ('\u{10756}', '\u{1075f}'),
- ('\u{10768}', '\u{107ff}'), ('\u{10806}', '\u{10807}'), ('\u{10809}', '\u{10809}'),
- ('\u{10836}', '\u{10836}'), ('\u{10839}', '\u{1083b}'), ('\u{1083d}', '\u{1083e}'),
- ('\u{10856}', '\u{10856}'), ('\u{1089f}', '\u{108a6}'), ('\u{108b0}', '\u{108ff}'),
- ('\u{1091c}', '\u{1091e}'), ('\u{1093a}', '\u{1093e}'), ('\u{10940}', '\u{1097f}'),
- ('\u{109b8}', '\u{109bd}'), ('\u{109c0}', '\u{109ff}'), ('\u{10a04}', '\u{10a04}'),
- ('\u{10a07}', '\u{10a0b}'), ('\u{10a14}', '\u{10a14}'), ('\u{10a18}', '\u{10a18}'),
- ('\u{10a34}', '\u{10a37}'), ('\u{10a3b}', '\u{10a3e}'), ('\u{10a48}', '\u{10a4f}'),
- ('\u{10a59}', '\u{10a5f}'), ('\u{10aa0}', '\u{10abf}'), ('\u{10ae7}', '\u{10aea}'),
- ('\u{10af7}', '\u{10aff}'), ('\u{10b36}', '\u{10b38}'), ('\u{10b56}', '\u{10b57}'),
- ('\u{10b73}', '\u{10b77}'), ('\u{10b92}', '\u{10b98}'), ('\u{10b9d}', '\u{10ba8}'),
- ('\u{10bb0}', '\u{10bff}'), ('\u{10c49}', '\u{10e5f}'), ('\u{10e7f}', '\u{10fff}'),
- ('\u{1104e}', '\u{11051}'), ('\u{11070}', '\u{1107e}'), ('\u{110c2}', '\u{110cf}'),
- ('\u{110e9}', '\u{110ef}'), ('\u{110fa}', '\u{110ff}'), ('\u{11135}', '\u{11135}'),
- ('\u{11144}', '\u{1114f}'), ('\u{11177}', '\u{1117f}'), ('\u{111c9}', '\u{111cc}'),
- ('\u{111ce}', '\u{111cf}'), ('\u{111db}', '\u{111e0}'), ('\u{111f5}', '\u{111ff}'),
- ('\u{11212}', '\u{11212}'), ('\u{1123e}', '\u{112af}'), ('\u{112eb}', '\u{112ef}'),
- ('\u{112fa}', '\u{11300}'), ('\u{11304}', '\u{11304}'), ('\u{1130d}', '\u{1130e}'),
- ('\u{11311}', '\u{11312}'), ('\u{11329}', '\u{11329}'), ('\u{11331}', '\u{11331}'),
- ('\u{11334}', '\u{11334}'), ('\u{1133a}', '\u{1133b}'), ('\u{11345}', '\u{11346}'),
- ('\u{11349}', '\u{1134a}'), ('\u{1134e}', '\u{11356}'), ('\u{11358}', '\u{1135c}'),
- ('\u{11364}', '\u{11365}'), ('\u{1136d}', '\u{1136f}'), ('\u{11375}', '\u{1147f}'),
- ('\u{114c8}', '\u{114cf}'), ('\u{114da}', '\u{1157f}'), ('\u{115b6}', '\u{115b7}'),
- ('\u{115ca}', '\u{115ff}'), ('\u{11645}', '\u{1164f}'), ('\u{1165a}', '\u{1167f}'),
- ('\u{116b8}', '\u{116bf}'), ('\u{116ca}', '\u{1189f}'), ('\u{118f3}', '\u{118fe}'),
- ('\u{11900}', '\u{11abf}'), ('\u{11af9}', '\u{11fff}'), ('\u{12399}', '\u{123ff}'),
- ('\u{1246f}', '\u{1246f}'), ('\u{12475}', '\u{12fff}'), ('\u{1342f}', '\u{167ff}'),
- ('\u{16a39}', '\u{16a3f}'), ('\u{16a5f}', '\u{16a5f}'), ('\u{16a6a}', '\u{16a6d}'),
- ('\u{16a70}', '\u{16acf}'), ('\u{16aee}', '\u{16aef}'), ('\u{16af6}', '\u{16aff}'),
- ('\u{16b46}', '\u{16b4f}'), ('\u{16b5a}', '\u{16b5a}'), ('\u{16b62}', '\u{16b62}'),
- ('\u{16b78}', '\u{16b7c}'), ('\u{16b90}', '\u{16eff}'), ('\u{16f45}', '\u{16f4f}'),
- ('\u{16f7f}', '\u{16f8e}'), ('\u{16fa0}', '\u{1afff}'), ('\u{1b002}', '\u{1bbff}'),
- ('\u{1bc6b}', '\u{1bc6f}'), ('\u{1bc7d}', '\u{1bc7f}'), ('\u{1bc89}', '\u{1bc8f}'),
- ('\u{1bc9a}', '\u{1bc9b}'), ('\u{1bca4}', '\u{1cfff}'), ('\u{1d0f6}', '\u{1d0ff}'),
- ('\u{1d127}', '\u{1d128}'), ('\u{1d1de}', '\u{1d1ff}'), ('\u{1d246}', '\u{1d2ff}'),
- ('\u{1d357}', '\u{1d35f}'), ('\u{1d372}', '\u{1d3ff}'), ('\u{1d455}', '\u{1d455}'),
- ('\u{1d49d}', '\u{1d49d}'), ('\u{1d4a0}', '\u{1d4a1}'), ('\u{1d4a3}', '\u{1d4a4}'),
- ('\u{1d4a7}', '\u{1d4a8}'), ('\u{1d4ad}', '\u{1d4ad}'), ('\u{1d4ba}', '\u{1d4ba}'),
- ('\u{1d4bc}', '\u{1d4bc}'), ('\u{1d4c4}', '\u{1d4c4}'), ('\u{1d506}', '\u{1d506}'),
- ('\u{1d50b}', '\u{1d50c}'), ('\u{1d515}', '\u{1d515}'), ('\u{1d51d}', '\u{1d51d}'),
- ('\u{1d53a}', '\u{1d53a}'), ('\u{1d53f}', '\u{1d53f}'), ('\u{1d545}', '\u{1d545}'),
- ('\u{1d547}', '\u{1d549}'), ('\u{1d551}', '\u{1d551}'), ('\u{1d6a6}', '\u{1d6a7}'),
- ('\u{1d7cc}', '\u{1d7cd}'), ('\u{1d800}', '\u{1e7ff}'), ('\u{1e8c5}', '\u{1e8c6}'),
- ('\u{1e8d7}', '\u{1edff}'), ('\u{1ee04}', '\u{1ee04}'), ('\u{1ee20}', '\u{1ee20}'),
- ('\u{1ee23}', '\u{1ee23}'), ('\u{1ee25}', '\u{1ee26}'), ('\u{1ee28}', '\u{1ee28}'),
- ('\u{1ee33}', '\u{1ee33}'), ('\u{1ee38}', '\u{1ee38}'), ('\u{1ee3a}', '\u{1ee3a}'),
- ('\u{1ee3c}', '\u{1ee41}'), ('\u{1ee43}', '\u{1ee46}'), ('\u{1ee48}', '\u{1ee48}'),
- ('\u{1ee4a}', '\u{1ee4a}'), ('\u{1ee4c}', '\u{1ee4c}'), ('\u{1ee50}', '\u{1ee50}'),
- ('\u{1ee53}', '\u{1ee53}'), ('\u{1ee55}', '\u{1ee56}'), ('\u{1ee58}', '\u{1ee58}'),
- ('\u{1ee5a}', '\u{1ee5a}'), ('\u{1ee5c}', '\u{1ee5c}'), ('\u{1ee5e}', '\u{1ee5e}'),
- ('\u{1ee60}', '\u{1ee60}'), ('\u{1ee63}', '\u{1ee63}'), ('\u{1ee65}', '\u{1ee66}'),
- ('\u{1ee6b}', '\u{1ee6b}'), ('\u{1ee73}', '\u{1ee73}'), ('\u{1ee78}', '\u{1ee78}'),
- ('\u{1ee7d}', '\u{1ee7d}'), ('\u{1ee7f}', '\u{1ee7f}'), ('\u{1ee8a}', '\u{1ee8a}'),
- ('\u{1ee9c}', '\u{1eea0}'), ('\u{1eea4}', '\u{1eea4}'), ('\u{1eeaa}', '\u{1eeaa}'),
- ('\u{1eebc}', '\u{1eeef}'), ('\u{1eef2}', '\u{1efff}'), ('\u{1f02c}', '\u{1f02f}'),
- ('\u{1f094}', '\u{1f09f}'), ('\u{1f0af}', '\u{1f0b0}'), ('\u{1f0c0}', '\u{1f0c0}'),
- ('\u{1f0d0}', '\u{1f0d0}'), ('\u{1f0f6}', '\u{1f0ff}'), ('\u{1f10d}', '\u{1f10f}'),
- ('\u{1f12f}', '\u{1f12f}'), ('\u{1f16c}', '\u{1f16f}'), ('\u{1f19b}', '\u{1f1e5}'),
- ('\u{1f203}', '\u{1f20f}'), ('\u{1f23b}', '\u{1f23f}'), ('\u{1f249}', '\u{1f24f}'),
- ('\u{1f252}', '\u{1f2ff}'), ('\u{1f32d}', '\u{1f32f}'), ('\u{1f37e}', '\u{1f37f}'),
- ('\u{1f3cf}', '\u{1f3d3}'), ('\u{1f3f8}', '\u{1f3ff}'), ('\u{1f4ff}', '\u{1f4ff}'),
- ('\u{1f54b}', '\u{1f54f}'), ('\u{1f57a}', '\u{1f57a}'), ('\u{1f5a4}', '\u{1f5a4}'),
- ('\u{1f643}', '\u{1f644}'), ('\u{1f6d0}', '\u{1f6df}'), ('\u{1f6ed}', '\u{1f6ef}'),
- ('\u{1f6f4}', '\u{1f6ff}'), ('\u{1f774}', '\u{1f77f}'), ('\u{1f7d5}', '\u{1f7ff}'),
- ('\u{1f80c}', '\u{1f80f}'), ('\u{1f848}', '\u{1f84f}'), ('\u{1f85a}', '\u{1f85f}'),
- ('\u{1f888}', '\u{1f88f}'), ('\u{1f8ae}', '\u{1ffff}'), ('\u{20001}', '\u{2a6d5}'),
- ('\u{2a6d7}', '\u{2a6ff}'), ('\u{2a701}', '\u{2b733}'), ('\u{2b735}', '\u{2b73f}'),
- ('\u{2b741}', '\u{2b81c}'), ('\u{2b81e}', '\u{2f7ff}'), ('\u{2fa1e}', '\u{e0000}'),
- ('\u{e0002}', '\u{e001f}'), ('\u{e0080}', '\u{e00ff}'), ('\u{e01f0}', '\u{effff}'),
- ('\u{f0001}', '\u{ffffc}'), ('\u{ffffe}', '\u{fffff}'), ('\u{100001}', '\u{10fffc}'),
- ('\u{10fffe}', '\u{10ffff}')
+ '\u{31ef}'), ('\u{321f}', '\u{321f}'), ('\u{32ff}', '\u{32ff}'), ('\u{4db6}', '\u{4dbf}'),
+ ('\u{9fcd}', '\u{9fff}'), ('\u{a48d}', '\u{a48f}'), ('\u{a4c7}', '\u{a4cf}'), ('\u{a62c}',
+ '\u{a63f}'), ('\u{a69e}', '\u{a69e}'), ('\u{a6f8}', '\u{a6ff}'), ('\u{a78f}', '\u{a78f}'),
+ ('\u{a7ae}', '\u{a7af}'), ('\u{a7b2}', '\u{a7f6}'), ('\u{a82c}', '\u{a82f}'), ('\u{a83a}',
+ '\u{a83f}'), ('\u{a878}', '\u{a87f}'), ('\u{a8c5}', '\u{a8cd}'), ('\u{a8da}', '\u{a8df}'),
+ ('\u{a8fc}', '\u{a8ff}'), ('\u{a954}', '\u{a95e}'), ('\u{a97d}', '\u{a97f}'), ('\u{a9ce}',
+ '\u{a9ce}'), ('\u{a9da}', '\u{a9dd}'), ('\u{a9ff}', '\u{a9ff}'), ('\u{aa37}', '\u{aa3f}'),
+ ('\u{aa4e}', '\u{aa4f}'), ('\u{aa5a}', '\u{aa5b}'), ('\u{aac3}', '\u{aada}'), ('\u{aaf7}',
+ '\u{ab00}'), ('\u{ab07}', '\u{ab08}'), ('\u{ab0f}', '\u{ab10}'), ('\u{ab17}', '\u{ab1f}'),
+ ('\u{ab27}', '\u{ab27}'), ('\u{ab2f}', '\u{ab2f}'), ('\u{ab60}', '\u{ab63}'), ('\u{ab66}',
+ '\u{abbf}'), ('\u{abee}', '\u{abef}'), ('\u{abfa}', '\u{abff}'), ('\u{d7a4}', '\u{d7af}'),
+ ('\u{d7c7}', '\u{d7ca}'), ('\u{d7fc}', '\u{d7ff}'), ('\u{fa6e}', '\u{fa6f}'), ('\u{fada}',
+ '\u{faff}'), ('\u{fb07}', '\u{fb12}'), ('\u{fb18}', '\u{fb1c}'), ('\u{fb37}', '\u{fb37}'),
+ ('\u{fb3d}', '\u{fb3d}'), ('\u{fb3f}', '\u{fb3f}'), ('\u{fb42}', '\u{fb42}'), ('\u{fb45}',
+ '\u{fb45}'), ('\u{fbc2}', '\u{fbd2}'), ('\u{fd40}', '\u{fd4f}'), ('\u{fd90}', '\u{fd91}'),
+ ('\u{fdc8}', '\u{fdef}'), ('\u{fdfe}', '\u{fdff}'), ('\u{fe1a}', '\u{fe1f}'), ('\u{fe2e}',
+ '\u{fe2f}'), ('\u{fe53}', '\u{fe53}'), ('\u{fe67}', '\u{fe67}'), ('\u{fe6c}', '\u{fe6f}'),
+ ('\u{fe75}', '\u{fe75}'), ('\u{fefd}', '\u{fefe}'), ('\u{ff00}', '\u{ff00}'), ('\u{ffbf}',
+ '\u{ffc1}'), ('\u{ffc8}', '\u{ffc9}'), ('\u{ffd0}', '\u{ffd1}'), ('\u{ffd8}', '\u{ffd9}'),
+ ('\u{ffdd}', '\u{ffdf}'), ('\u{ffe7}', '\u{ffe7}'), ('\u{ffef}', '\u{fff8}'), ('\u{fffe}',
+ '\u{ffff}'), ('\u{1000c}', '\u{1000c}'), ('\u{10027}', '\u{10027}'), ('\u{1003b}',
+ '\u{1003b}'), ('\u{1003e}', '\u{1003e}'), ('\u{1004e}', '\u{1004f}'), ('\u{1005e}',
+ '\u{1007f}'), ('\u{100fb}', '\u{100ff}'), ('\u{10103}', '\u{10106}'), ('\u{10134}',
+ '\u{10136}'), ('\u{1018d}', '\u{1018f}'), ('\u{1019c}', '\u{1019f}'), ('\u{101a1}',
+ '\u{101cf}'), ('\u{101fe}', '\u{1027f}'), ('\u{1029d}', '\u{1029f}'), ('\u{102d1}',
+ '\u{102df}'), ('\u{102fc}', '\u{102ff}'), ('\u{10324}', '\u{1032f}'), ('\u{1034b}',
+ '\u{1034f}'), ('\u{1037b}', '\u{1037f}'), ('\u{1039e}', '\u{1039e}'), ('\u{103c4}',
+ '\u{103c7}'), ('\u{103d6}', '\u{103ff}'), ('\u{1049e}', '\u{1049f}'), ('\u{104aa}',
+ '\u{104ff}'), ('\u{10528}', '\u{1052f}'), ('\u{10564}', '\u{1056e}'), ('\u{10570}',
+ '\u{105ff}'), ('\u{10737}', '\u{1073f}'), ('\u{10756}', '\u{1075f}'), ('\u{10768}',
+ '\u{107ff}'), ('\u{10806}', '\u{10807}'), ('\u{10809}', '\u{10809}'), ('\u{10836}',
+ '\u{10836}'), ('\u{10839}', '\u{1083b}'), ('\u{1083d}', '\u{1083e}'), ('\u{10856}',
+ '\u{10856}'), ('\u{1089f}', '\u{108a6}'), ('\u{108b0}', '\u{108ff}'), ('\u{1091c}',
+ '\u{1091e}'), ('\u{1093a}', '\u{1093e}'), ('\u{10940}', '\u{1097f}'), ('\u{109b8}',
+ '\u{109bd}'), ('\u{109c0}', '\u{109ff}'), ('\u{10a04}', '\u{10a04}'), ('\u{10a07}',
+ '\u{10a0b}'), ('\u{10a14}', '\u{10a14}'), ('\u{10a18}', '\u{10a18}'), ('\u{10a34}',
+ '\u{10a37}'), ('\u{10a3b}', '\u{10a3e}'), ('\u{10a48}', '\u{10a4f}'), ('\u{10a59}',
+ '\u{10a5f}'), ('\u{10aa0}', '\u{10abf}'), ('\u{10ae7}', '\u{10aea}'), ('\u{10af7}',
+ '\u{10aff}'), ('\u{10b36}', '\u{10b38}'), ('\u{10b56}', '\u{10b57}'), ('\u{10b73}',
+ '\u{10b77}'), ('\u{10b92}', '\u{10b98}'), ('\u{10b9d}', '\u{10ba8}'), ('\u{10bb0}',
+ '\u{10bff}'), ('\u{10c49}', '\u{10e5f}'), ('\u{10e7f}', '\u{10fff}'), ('\u{1104e}',
+ '\u{11051}'), ('\u{11070}', '\u{1107e}'), ('\u{110c2}', '\u{110cf}'), ('\u{110e9}',
+ '\u{110ef}'), ('\u{110fa}', '\u{110ff}'), ('\u{11135}', '\u{11135}'), ('\u{11144}',
+ '\u{1114f}'), ('\u{11177}', '\u{1117f}'), ('\u{111c9}', '\u{111cc}'), ('\u{111ce}',
+ '\u{111cf}'), ('\u{111db}', '\u{111e0}'), ('\u{111f5}', '\u{111ff}'), ('\u{11212}',
+ '\u{11212}'), ('\u{1123e}', '\u{112af}'), ('\u{112eb}', '\u{112ef}'), ('\u{112fa}',
+ '\u{11300}'), ('\u{11304}', '\u{11304}'), ('\u{1130d}', '\u{1130e}'), ('\u{11311}',
+ '\u{11312}'), ('\u{11329}', '\u{11329}'), ('\u{11331}', '\u{11331}'), ('\u{11334}',
+ '\u{11334}'), ('\u{1133a}', '\u{1133b}'), ('\u{11345}', '\u{11346}'), ('\u{11349}',
+ '\u{1134a}'), ('\u{1134e}', '\u{11356}'), ('\u{11358}', '\u{1135c}'), ('\u{11364}',
+ '\u{11365}'), ('\u{1136d}', '\u{1136f}'), ('\u{11375}', '\u{1147f}'), ('\u{114c8}',
+ '\u{114cf}'), ('\u{114da}', '\u{1157f}'), ('\u{115b6}', '\u{115b7}'), ('\u{115ca}',
+ '\u{115ff}'), ('\u{11645}', '\u{1164f}'), ('\u{1165a}', '\u{1167f}'), ('\u{116b8}',
+ '\u{116bf}'), ('\u{116ca}', '\u{1189f}'), ('\u{118f3}', '\u{118fe}'), ('\u{11900}',
+ '\u{11abf}'), ('\u{11af9}', '\u{11fff}'), ('\u{12399}', '\u{123ff}'), ('\u{1246f}',
+ '\u{1246f}'), ('\u{12475}', '\u{12fff}'), ('\u{1342f}', '\u{167ff}'), ('\u{16a39}',
+ '\u{16a3f}'), ('\u{16a5f}', '\u{16a5f}'), ('\u{16a6a}', '\u{16a6d}'), ('\u{16a70}',
+ '\u{16acf}'), ('\u{16aee}', '\u{16aef}'), ('\u{16af6}', '\u{16aff}'), ('\u{16b46}',
+ '\u{16b4f}'), ('\u{16b5a}', '\u{16b5a}'), ('\u{16b62}', '\u{16b62}'), ('\u{16b78}',
+ '\u{16b7c}'), ('\u{16b90}', '\u{16eff}'), ('\u{16f45}', '\u{16f4f}'), ('\u{16f7f}',
+ '\u{16f8e}'), ('\u{16fa0}', '\u{1afff}'), ('\u{1b002}', '\u{1bbff}'), ('\u{1bc6b}',
+ '\u{1bc6f}'), ('\u{1bc7d}', '\u{1bc7f}'), ('\u{1bc89}', '\u{1bc8f}'), ('\u{1bc9a}',
+ '\u{1bc9b}'), ('\u{1bca4}', '\u{1cfff}'), ('\u{1d0f6}', '\u{1d0ff}'), ('\u{1d127}',
+ '\u{1d128}'), ('\u{1d1de}', '\u{1d1ff}'), ('\u{1d246}', '\u{1d2ff}'), ('\u{1d357}',
+ '\u{1d35f}'), ('\u{1d372}', '\u{1d3ff}'), ('\u{1d455}', '\u{1d455}'), ('\u{1d49d}',
+ '\u{1d49d}'), ('\u{1d4a0}', '\u{1d4a1}'), ('\u{1d4a3}', '\u{1d4a4}'), ('\u{1d4a7}',
+ '\u{1d4a8}'), ('\u{1d4ad}', '\u{1d4ad}'), ('\u{1d4ba}', '\u{1d4ba}'), ('\u{1d4bc}',
+ '\u{1d4bc}'), ('\u{1d4c4}', '\u{1d4c4}'), ('\u{1d506}', '\u{1d506}'), ('\u{1d50b}',
+ '\u{1d50c}'), ('\u{1d515}', '\u{1d515}'), ('\u{1d51d}', '\u{1d51d}'), ('\u{1d53a}',
+ '\u{1d53a}'), ('\u{1d53f}', '\u{1d53f}'), ('\u{1d545}', '\u{1d545}'), ('\u{1d547}',
+ '\u{1d549}'), ('\u{1d551}', '\u{1d551}'), ('\u{1d6a6}', '\u{1d6a7}'), ('\u{1d7cc}',
+ '\u{1d7cd}'), ('\u{1d800}', '\u{1e7ff}'), ('\u{1e8c5}', '\u{1e8c6}'), ('\u{1e8d7}',
+ '\u{1edff}'), ('\u{1ee04}', '\u{1ee04}'), ('\u{1ee20}', '\u{1ee20}'), ('\u{1ee23}',
+ '\u{1ee23}'), ('\u{1ee25}', '\u{1ee26}'), ('\u{1ee28}', '\u{1ee28}'), ('\u{1ee33}',
+ '\u{1ee33}'), ('\u{1ee38}', '\u{1ee38}'), ('\u{1ee3a}', '\u{1ee3a}'), ('\u{1ee3c}',
+ '\u{1ee41}'), ('\u{1ee43}', '\u{1ee46}'), ('\u{1ee48}', '\u{1ee48}'), ('\u{1ee4a}',
+ '\u{1ee4a}'), ('\u{1ee4c}', '\u{1ee4c}'), ('\u{1ee50}', '\u{1ee50}'), ('\u{1ee53}',
+ '\u{1ee53}'), ('\u{1ee55}', '\u{1ee56}'), ('\u{1ee58}', '\u{1ee58}'), ('\u{1ee5a}',
+ '\u{1ee5a}'), ('\u{1ee5c}', '\u{1ee5c}'), ('\u{1ee5e}', '\u{1ee5e}'), ('\u{1ee60}',
+ '\u{1ee60}'), ('\u{1ee63}', '\u{1ee63}'), ('\u{1ee65}', '\u{1ee66}'), ('\u{1ee6b}',
+ '\u{1ee6b}'), ('\u{1ee73}', '\u{1ee73}'), ('\u{1ee78}', '\u{1ee78}'), ('\u{1ee7d}',
+ '\u{1ee7d}'), ('\u{1ee7f}', '\u{1ee7f}'), ('\u{1ee8a}', '\u{1ee8a}'), ('\u{1ee9c}',
+ '\u{1eea0}'), ('\u{1eea4}', '\u{1eea4}'), ('\u{1eeaa}', '\u{1eeaa}'), ('\u{1eebc}',
+ '\u{1eeef}'), ('\u{1eef2}', '\u{1efff}'), ('\u{1f02c}', '\u{1f02f}'), ('\u{1f094}',
+ '\u{1f09f}'), ('\u{1f0af}', '\u{1f0b0}'), ('\u{1f0c0}', '\u{1f0c0}'), ('\u{1f0d0}',
+ '\u{1f0d0}'), ('\u{1f0f6}', '\u{1f0ff}'), ('\u{1f10d}', '\u{1f10f}'), ('\u{1f12f}',
+ '\u{1f12f}'), ('\u{1f16c}', '\u{1f16f}'), ('\u{1f19b}', '\u{1f1e5}'), ('\u{1f203}',
+ '\u{1f20f}'), ('\u{1f23b}', '\u{1f23f}'), ('\u{1f249}', '\u{1f24f}'), ('\u{1f252}',
+ '\u{1f2ff}'), ('\u{1f32d}', '\u{1f32f}'), ('\u{1f37e}', '\u{1f37f}'), ('\u{1f3cf}',
+ '\u{1f3d3}'), ('\u{1f3f8}', '\u{1f3ff}'), ('\u{1f4ff}', '\u{1f4ff}'), ('\u{1f54b}',
+ '\u{1f54f}'), ('\u{1f57a}', '\u{1f57a}'), ('\u{1f5a4}', '\u{1f5a4}'), ('\u{1f643}',
+ '\u{1f644}'), ('\u{1f6d0}', '\u{1f6df}'), ('\u{1f6ed}', '\u{1f6ef}'), ('\u{1f6f4}',
+ '\u{1f6ff}'), ('\u{1f774}', '\u{1f77f}'), ('\u{1f7d5}', '\u{1f7ff}'), ('\u{1f80c}',
+ '\u{1f80f}'), ('\u{1f848}', '\u{1f84f}'), ('\u{1f85a}', '\u{1f85f}'), ('\u{1f888}',
+ '\u{1f88f}'), ('\u{1f8ae}', '\u{1ffff}'), ('\u{2a6d7}', '\u{2a6ff}'), ('\u{2b735}',
+ '\u{2b73f}'), ('\u{2b81e}', '\u{2f7ff}'), ('\u{2fa1e}', '\u{e0000}'), ('\u{e0002}',
+ '\u{e001f}'), ('\u{e0080}', '\u{e00ff}'), ('\u{e01f0}', '\u{effff}'), ('\u{ffffe}',
+ '\u{fffff}'), ('\u{10fffe}', '\u{10ffff}')
];
pub const Co_table: &'static [(char, char)] = &[
- ('\u{e000}', '\u{e000}'), ('\u{f8ff}', '\u{f8ff}'), ('\u{f0000}', '\u{f0000}'),
- ('\u{ffffd}', '\u{ffffd}'), ('\u{100000}', '\u{100000}'), ('\u{10fffd}', '\u{10fffd}')
+ ('\u{e000}', '\u{f8ff}'), ('\u{f0000}', '\u{ffffd}'), ('\u{100000}', '\u{10fffd}')
];
pub const L_table: &'static [(char, char)] = &[
('\u{2e2f}', '\u{2e2f}'), ('\u{3005}', '\u{3006}'), ('\u{3031}', '\u{3035}'), ('\u{303b}',
'\u{303c}'), ('\u{3041}', '\u{3096}'), ('\u{309d}', '\u{309f}'), ('\u{30a1}', '\u{30fa}'),
('\u{30fc}', '\u{30ff}'), ('\u{3105}', '\u{312d}'), ('\u{3131}', '\u{318e}'), ('\u{31a0}',
- '\u{31ba}'), ('\u{31f0}', '\u{31ff}'), ('\u{3400}', '\u{3400}'), ('\u{4db5}', '\u{4db5}'),
- ('\u{4e00}', '\u{4e00}'), ('\u{9fcc}', '\u{9fcc}'), ('\u{a000}', '\u{a48c}'), ('\u{a4d0}',
- '\u{a4fd}'), ('\u{a500}', '\u{a60c}'), ('\u{a610}', '\u{a61f}'), ('\u{a62a}', '\u{a62b}'),
- ('\u{a640}', '\u{a66e}'), ('\u{a67f}', '\u{a69d}'), ('\u{a6a0}', '\u{a6e5}'), ('\u{a717}',
- '\u{a71f}'), ('\u{a722}', '\u{a788}'), ('\u{a78b}', '\u{a78e}'), ('\u{a790}', '\u{a7ad}'),
- ('\u{a7b0}', '\u{a7b1}'), ('\u{a7f7}', '\u{a801}'), ('\u{a803}', '\u{a805}'), ('\u{a807}',
- '\u{a80a}'), ('\u{a80c}', '\u{a822}'), ('\u{a840}', '\u{a873}'), ('\u{a882}', '\u{a8b3}'),
- ('\u{a8f2}', '\u{a8f7}'), ('\u{a8fb}', '\u{a8fb}'), ('\u{a90a}', '\u{a925}'), ('\u{a930}',
- '\u{a946}'), ('\u{a960}', '\u{a97c}'), ('\u{a984}', '\u{a9b2}'), ('\u{a9cf}', '\u{a9cf}'),
- ('\u{a9e0}', '\u{a9e4}'), ('\u{a9e6}', '\u{a9ef}'), ('\u{a9fa}', '\u{a9fe}'), ('\u{aa00}',
- '\u{aa28}'), ('\u{aa40}', '\u{aa42}'), ('\u{aa44}', '\u{aa4b}'), ('\u{aa60}', '\u{aa76}'),
- ('\u{aa7a}', '\u{aa7a}'), ('\u{aa7e}', '\u{aaaf}'), ('\u{aab1}', '\u{aab1}'), ('\u{aab5}',
- '\u{aab6}'), ('\u{aab9}', '\u{aabd}'), ('\u{aac0}', '\u{aac0}'), ('\u{aac2}', '\u{aac2}'),
- ('\u{aadb}', '\u{aadd}'), ('\u{aae0}', '\u{aaea}'), ('\u{aaf2}', '\u{aaf4}'), ('\u{ab01}',
- '\u{ab06}'), ('\u{ab09}', '\u{ab0e}'), ('\u{ab11}', '\u{ab16}'), ('\u{ab20}', '\u{ab26}'),
- ('\u{ab28}', '\u{ab2e}'), ('\u{ab30}', '\u{ab5a}'), ('\u{ab5c}', '\u{ab5f}'), ('\u{ab64}',
- '\u{ab65}'), ('\u{abc0}', '\u{abe2}'), ('\u{ac00}', '\u{ac00}'), ('\u{d7a3}', '\u{d7a3}'),
- ('\u{d7b0}', '\u{d7c6}'), ('\u{d7cb}', '\u{d7fb}'), ('\u{f900}', '\u{fa6d}'), ('\u{fa70}',
- '\u{fad9}'), ('\u{fb00}', '\u{fb06}'), ('\u{fb13}', '\u{fb17}'), ('\u{fb1d}', '\u{fb1d}'),
- ('\u{fb1f}', '\u{fb28}'), ('\u{fb2a}', '\u{fb36}'), ('\u{fb38}', '\u{fb3c}'), ('\u{fb3e}',
- '\u{fb3e}'), ('\u{fb40}', '\u{fb41}'), ('\u{fb43}', '\u{fb44}'), ('\u{fb46}', '\u{fbb1}'),
- ('\u{fbd3}', '\u{fd3d}'), ('\u{fd50}', '\u{fd8f}'), ('\u{fd92}', '\u{fdc7}'), ('\u{fdf0}',
- '\u{fdfb}'), ('\u{fe70}', '\u{fe74}'), ('\u{fe76}', '\u{fefc}'), ('\u{ff21}', '\u{ff3a}'),
- ('\u{ff41}', '\u{ff5a}'), ('\u{ff66}', '\u{ffbe}'), ('\u{ffc2}', '\u{ffc7}'), ('\u{ffca}',
- '\u{ffcf}'), ('\u{ffd2}', '\u{ffd7}'), ('\u{ffda}', '\u{ffdc}'), ('\u{10000}', '\u{1000b}'),
- ('\u{1000d}', '\u{10026}'), ('\u{10028}', '\u{1003a}'), ('\u{1003c}', '\u{1003d}'),
- ('\u{1003f}', '\u{1004d}'), ('\u{10050}', '\u{1005d}'), ('\u{10080}', '\u{100fa}'),
- ('\u{10280}', '\u{1029c}'), ('\u{102a0}', '\u{102d0}'), ('\u{10300}', '\u{1031f}'),
- ('\u{10330}', '\u{10340}'), ('\u{10342}', '\u{10349}'), ('\u{10350}', '\u{10375}'),
- ('\u{10380}', '\u{1039d}'), ('\u{103a0}', '\u{103c3}'), ('\u{103c8}', '\u{103cf}'),
- ('\u{10400}', '\u{1049d}'), ('\u{10500}', '\u{10527}'), ('\u{10530}', '\u{10563}'),
- ('\u{10600}', '\u{10736}'), ('\u{10740}', '\u{10755}'), ('\u{10760}', '\u{10767}'),
- ('\u{10800}', '\u{10805}'), ('\u{10808}', '\u{10808}'), ('\u{1080a}', '\u{10835}'),
- ('\u{10837}', '\u{10838}'), ('\u{1083c}', '\u{1083c}'), ('\u{1083f}', '\u{10855}'),
- ('\u{10860}', '\u{10876}'), ('\u{10880}', '\u{1089e}'), ('\u{10900}', '\u{10915}'),
- ('\u{10920}', '\u{10939}'), ('\u{10980}', '\u{109b7}'), ('\u{109be}', '\u{109bf}'),
- ('\u{10a00}', '\u{10a00}'), ('\u{10a10}', '\u{10a13}'), ('\u{10a15}', '\u{10a17}'),
- ('\u{10a19}', '\u{10a33}'), ('\u{10a60}', '\u{10a7c}'), ('\u{10a80}', '\u{10a9c}'),
- ('\u{10ac0}', '\u{10ac7}'), ('\u{10ac9}', '\u{10ae4}'), ('\u{10b00}', '\u{10b35}'),
- ('\u{10b40}', '\u{10b55}'), ('\u{10b60}', '\u{10b72}'), ('\u{10b80}', '\u{10b91}'),
- ('\u{10c00}', '\u{10c48}'), ('\u{11003}', '\u{11037}'), ('\u{11083}', '\u{110af}'),
- ('\u{110d0}', '\u{110e8}'), ('\u{11103}', '\u{11126}'), ('\u{11150}', '\u{11172}'),
- ('\u{11176}', '\u{11176}'), ('\u{11183}', '\u{111b2}'), ('\u{111c1}', '\u{111c4}'),
- ('\u{111da}', '\u{111da}'), ('\u{11200}', '\u{11211}'), ('\u{11213}', '\u{1122b}'),
- ('\u{112b0}', '\u{112de}'), ('\u{11305}', '\u{1130c}'), ('\u{1130f}', '\u{11310}'),
- ('\u{11313}', '\u{11328}'), ('\u{1132a}', '\u{11330}'), ('\u{11332}', '\u{11333}'),
- ('\u{11335}', '\u{11339}'), ('\u{1133d}', '\u{1133d}'), ('\u{1135d}', '\u{11361}'),
- ('\u{11480}', '\u{114af}'), ('\u{114c4}', '\u{114c5}'), ('\u{114c7}', '\u{114c7}'),
- ('\u{11580}', '\u{115ae}'), ('\u{11600}', '\u{1162f}'), ('\u{11644}', '\u{11644}'),
- ('\u{11680}', '\u{116aa}'), ('\u{118a0}', '\u{118df}'), ('\u{118ff}', '\u{118ff}'),
- ('\u{11ac0}', '\u{11af8}'), ('\u{12000}', '\u{12398}'), ('\u{13000}', '\u{1342e}'),
- ('\u{16800}', '\u{16a38}'), ('\u{16a40}', '\u{16a5e}'), ('\u{16ad0}', '\u{16aed}'),
- ('\u{16b00}', '\u{16b2f}'), ('\u{16b40}', '\u{16b43}'), ('\u{16b63}', '\u{16b77}'),
- ('\u{16b7d}', '\u{16b8f}'), ('\u{16f00}', '\u{16f44}'), ('\u{16f50}', '\u{16f50}'),
- ('\u{16f93}', '\u{16f9f}'), ('\u{1b000}', '\u{1b001}'), ('\u{1bc00}', '\u{1bc6a}'),
- ('\u{1bc70}', '\u{1bc7c}'), ('\u{1bc80}', '\u{1bc88}'), ('\u{1bc90}', '\u{1bc99}'),
- ('\u{1d400}', '\u{1d454}'), ('\u{1d456}', '\u{1d49c}'), ('\u{1d49e}', '\u{1d49f}'),
- ('\u{1d4a2}', '\u{1d4a2}'), ('\u{1d4a5}', '\u{1d4a6}'), ('\u{1d4a9}', '\u{1d4ac}'),
- ('\u{1d4ae}', '\u{1d4b9}'), ('\u{1d4bb}', '\u{1d4bb}'), ('\u{1d4bd}', '\u{1d4c3}'),
- ('\u{1d4c5}', '\u{1d505}'), ('\u{1d507}', '\u{1d50a}'), ('\u{1d50d}', '\u{1d514}'),
- ('\u{1d516}', '\u{1d51c}'), ('\u{1d51e}', '\u{1d539}'), ('\u{1d53b}', '\u{1d53e}'),
- ('\u{1d540}', '\u{1d544}'), ('\u{1d546}', '\u{1d546}'), ('\u{1d54a}', '\u{1d550}'),
- ('\u{1d552}', '\u{1d6a5}'), ('\u{1d6a8}', '\u{1d6c0}'), ('\u{1d6c2}', '\u{1d6da}'),
- ('\u{1d6dc}', '\u{1d6fa}'), ('\u{1d6fc}', '\u{1d714}'), ('\u{1d716}', '\u{1d734}'),
- ('\u{1d736}', '\u{1d74e}'), ('\u{1d750}', '\u{1d76e}'), ('\u{1d770}', '\u{1d788}'),
- ('\u{1d78a}', '\u{1d7a8}'), ('\u{1d7aa}', '\u{1d7c2}'), ('\u{1d7c4}', '\u{1d7cb}'),
- ('\u{1e800}', '\u{1e8c4}'), ('\u{1ee00}', '\u{1ee03}'), ('\u{1ee05}', '\u{1ee1f}'),
- ('\u{1ee21}', '\u{1ee22}'), ('\u{1ee24}', '\u{1ee24}'), ('\u{1ee27}', '\u{1ee27}'),
- ('\u{1ee29}', '\u{1ee32}'), ('\u{1ee34}', '\u{1ee37}'), ('\u{1ee39}', '\u{1ee39}'),
- ('\u{1ee3b}', '\u{1ee3b}'), ('\u{1ee42}', '\u{1ee42}'), ('\u{1ee47}', '\u{1ee47}'),
- ('\u{1ee49}', '\u{1ee49}'), ('\u{1ee4b}', '\u{1ee4b}'), ('\u{1ee4d}', '\u{1ee4f}'),
- ('\u{1ee51}', '\u{1ee52}'), ('\u{1ee54}', '\u{1ee54}'), ('\u{1ee57}', '\u{1ee57}'),
- ('\u{1ee59}', '\u{1ee59}'), ('\u{1ee5b}', '\u{1ee5b}'), ('\u{1ee5d}', '\u{1ee5d}'),
- ('\u{1ee5f}', '\u{1ee5f}'), ('\u{1ee61}', '\u{1ee62}'), ('\u{1ee64}', '\u{1ee64}'),
- ('\u{1ee67}', '\u{1ee6a}'), ('\u{1ee6c}', '\u{1ee72}'), ('\u{1ee74}', '\u{1ee77}'),
- ('\u{1ee79}', '\u{1ee7c}'), ('\u{1ee7e}', '\u{1ee7e}'), ('\u{1ee80}', '\u{1ee89}'),
- ('\u{1ee8b}', '\u{1ee9b}'), ('\u{1eea1}', '\u{1eea3}'), ('\u{1eea5}', '\u{1eea9}'),
- ('\u{1eeab}', '\u{1eebb}'), ('\u{20000}', '\u{20000}'), ('\u{2a6d6}', '\u{2a6d6}'),
- ('\u{2a700}', '\u{2a700}'), ('\u{2b734}', '\u{2b734}'), ('\u{2b740}', '\u{2b740}'),
- ('\u{2b81d}', '\u{2b81d}'), ('\u{2f800}', '\u{2fa1d}')
+ '\u{31ba}'), ('\u{31f0}', '\u{31ff}'), ('\u{3400}', '\u{4db5}'), ('\u{4e00}', '\u{9fcc}'),
+ ('\u{a000}', '\u{a48c}'), ('\u{a4d0}', '\u{a4fd}'), ('\u{a500}', '\u{a60c}'), ('\u{a610}',
+ '\u{a61f}'), ('\u{a62a}', '\u{a62b}'), ('\u{a640}', '\u{a66e}'), ('\u{a67f}', '\u{a69d}'),
+ ('\u{a6a0}', '\u{a6e5}'), ('\u{a717}', '\u{a71f}'), ('\u{a722}', '\u{a788}'), ('\u{a78b}',
+ '\u{a78e}'), ('\u{a790}', '\u{a7ad}'), ('\u{a7b0}', '\u{a7b1}'), ('\u{a7f7}', '\u{a801}'),
+ ('\u{a803}', '\u{a805}'), ('\u{a807}', '\u{a80a}'), ('\u{a80c}', '\u{a822}'), ('\u{a840}',
+ '\u{a873}'), ('\u{a882}', '\u{a8b3}'), ('\u{a8f2}', '\u{a8f7}'), ('\u{a8fb}', '\u{a8fb}'),
+ ('\u{a90a}', '\u{a925}'), ('\u{a930}', '\u{a946}'), ('\u{a960}', '\u{a97c}'), ('\u{a984}',
+ '\u{a9b2}'), ('\u{a9cf}', '\u{a9cf}'), ('\u{a9e0}', '\u{a9e4}'), ('\u{a9e6}', '\u{a9ef}'),
+ ('\u{a9fa}', '\u{a9fe}'), ('\u{aa00}', '\u{aa28}'), ('\u{aa40}', '\u{aa42}'), ('\u{aa44}',
+ '\u{aa4b}'), ('\u{aa60}', '\u{aa76}'), ('\u{aa7a}', '\u{aa7a}'), ('\u{aa7e}', '\u{aaaf}'),
+ ('\u{aab1}', '\u{aab1}'), ('\u{aab5}', '\u{aab6}'), ('\u{aab9}', '\u{aabd}'), ('\u{aac0}',
+ '\u{aac0}'), ('\u{aac2}', '\u{aac2}'), ('\u{aadb}', '\u{aadd}'), ('\u{aae0}', '\u{aaea}'),
+ ('\u{aaf2}', '\u{aaf4}'), ('\u{ab01}', '\u{ab06}'), ('\u{ab09}', '\u{ab0e}'), ('\u{ab11}',
+ '\u{ab16}'), ('\u{ab20}', '\u{ab26}'), ('\u{ab28}', '\u{ab2e}'), ('\u{ab30}', '\u{ab5a}'),
+ ('\u{ab5c}', '\u{ab5f}'), ('\u{ab64}', '\u{ab65}'), ('\u{abc0}', '\u{abe2}'), ('\u{ac00}',
+ '\u{d7a3}'), ('\u{d7b0}', '\u{d7c6}'), ('\u{d7cb}', '\u{d7fb}'), ('\u{f900}', '\u{fa6d}'),
+ ('\u{fa70}', '\u{fad9}'), ('\u{fb00}', '\u{fb06}'), ('\u{fb13}', '\u{fb17}'), ('\u{fb1d}',
+ '\u{fb1d}'), ('\u{fb1f}', '\u{fb28}'), ('\u{fb2a}', '\u{fb36}'), ('\u{fb38}', '\u{fb3c}'),
+ ('\u{fb3e}', '\u{fb3e}'), ('\u{fb40}', '\u{fb41}'), ('\u{fb43}', '\u{fb44}'), ('\u{fb46}',
+ '\u{fbb1}'), ('\u{fbd3}', '\u{fd3d}'), ('\u{fd50}', '\u{fd8f}'), ('\u{fd92}', '\u{fdc7}'),
+ ('\u{fdf0}', '\u{fdfb}'), ('\u{fe70}', '\u{fe74}'), ('\u{fe76}', '\u{fefc}'), ('\u{ff21}',
+ '\u{ff3a}'), ('\u{ff41}', '\u{ff5a}'), ('\u{ff66}', '\u{ffbe}'), ('\u{ffc2}', '\u{ffc7}'),
+ ('\u{ffca}', '\u{ffcf}'), ('\u{ffd2}', '\u{ffd7}'), ('\u{ffda}', '\u{ffdc}'), ('\u{10000}',
+ '\u{1000b}'), ('\u{1000d}', '\u{10026}'), ('\u{10028}', '\u{1003a}'), ('\u{1003c}',
+ '\u{1003d}'), ('\u{1003f}', '\u{1004d}'), ('\u{10050}', '\u{1005d}'), ('\u{10080}',
+ '\u{100fa}'), ('\u{10280}', '\u{1029c}'), ('\u{102a0}', '\u{102d0}'), ('\u{10300}',
+ '\u{1031f}'), ('\u{10330}', '\u{10340}'), ('\u{10342}', '\u{10349}'), ('\u{10350}',
+ '\u{10375}'), ('\u{10380}', '\u{1039d}'), ('\u{103a0}', '\u{103c3}'), ('\u{103c8}',
+ '\u{103cf}'), ('\u{10400}', '\u{1049d}'), ('\u{10500}', '\u{10527}'), ('\u{10530}',
+ '\u{10563}'), ('\u{10600}', '\u{10736}'), ('\u{10740}', '\u{10755}'), ('\u{10760}',
+ '\u{10767}'), ('\u{10800}', '\u{10805}'), ('\u{10808}', '\u{10808}'), ('\u{1080a}',
+ '\u{10835}'), ('\u{10837}', '\u{10838}'), ('\u{1083c}', '\u{1083c}'), ('\u{1083f}',
+ '\u{10855}'), ('\u{10860}', '\u{10876}'), ('\u{10880}', '\u{1089e}'), ('\u{10900}',
+ '\u{10915}'), ('\u{10920}', '\u{10939}'), ('\u{10980}', '\u{109b7}'), ('\u{109be}',
+ '\u{109bf}'), ('\u{10a00}', '\u{10a00}'), ('\u{10a10}', '\u{10a13}'), ('\u{10a15}',
+ '\u{10a17}'), ('\u{10a19}', '\u{10a33}'), ('\u{10a60}', '\u{10a7c}'), ('\u{10a80}',
+ '\u{10a9c}'), ('\u{10ac0}', '\u{10ac7}'), ('\u{10ac9}', '\u{10ae4}'), ('\u{10b00}',
+ '\u{10b35}'), ('\u{10b40}', '\u{10b55}'), ('\u{10b60}', '\u{10b72}'), ('\u{10b80}',
+ '\u{10b91}'), ('\u{10c00}', '\u{10c48}'), ('\u{11003}', '\u{11037}'), ('\u{11083}',
+ '\u{110af}'), ('\u{110d0}', '\u{110e8}'), ('\u{11103}', '\u{11126}'), ('\u{11150}',
+ '\u{11172}'), ('\u{11176}', '\u{11176}'), ('\u{11183}', '\u{111b2}'), ('\u{111c1}',
+ '\u{111c4}'), ('\u{111da}', '\u{111da}'), ('\u{11200}', '\u{11211}'), ('\u{11213}',
+ '\u{1122b}'), ('\u{112b0}', '\u{112de}'), ('\u{11305}', '\u{1130c}'), ('\u{1130f}',
+ '\u{11310}'), ('\u{11313}', '\u{11328}'), ('\u{1132a}', '\u{11330}'), ('\u{11332}',
+ '\u{11333}'), ('\u{11335}', '\u{11339}'), ('\u{1133d}', '\u{1133d}'), ('\u{1135d}',
+ '\u{11361}'), ('\u{11480}', '\u{114af}'), ('\u{114c4}', '\u{114c5}'), ('\u{114c7}',
+ '\u{114c7}'), ('\u{11580}', '\u{115ae}'), ('\u{11600}', '\u{1162f}'), ('\u{11644}',
+ '\u{11644}'), ('\u{11680}', '\u{116aa}'), ('\u{118a0}', '\u{118df}'), ('\u{118ff}',
+ '\u{118ff}'), ('\u{11ac0}', '\u{11af8}'), ('\u{12000}', '\u{12398}'), ('\u{13000}',
+ '\u{1342e}'), ('\u{16800}', '\u{16a38}'), ('\u{16a40}', '\u{16a5e}'), ('\u{16ad0}',
+ '\u{16aed}'), ('\u{16b00}', '\u{16b2f}'), ('\u{16b40}', '\u{16b43}'), ('\u{16b63}',
+ '\u{16b77}'), ('\u{16b7d}', '\u{16b8f}'), ('\u{16f00}', '\u{16f44}'), ('\u{16f50}',
+ '\u{16f50}'), ('\u{16f93}', '\u{16f9f}'), ('\u{1b000}', '\u{1b001}'), ('\u{1bc00}',
+ '\u{1bc6a}'), ('\u{1bc70}', '\u{1bc7c}'), ('\u{1bc80}', '\u{1bc88}'), ('\u{1bc90}',
+ '\u{1bc99}'), ('\u{1d400}', '\u{1d454}'), ('\u{1d456}', '\u{1d49c}'), ('\u{1d49e}',
+ '\u{1d49f}'), ('\u{1d4a2}', '\u{1d4a2}'), ('\u{1d4a5}', '\u{1d4a6}'), ('\u{1d4a9}',
+ '\u{1d4ac}'), ('\u{1d4ae}', '\u{1d4b9}'), ('\u{1d4bb}', '\u{1d4bb}'), ('\u{1d4bd}',
+ '\u{1d4c3}'), ('\u{1d4c5}', '\u{1d505}'), ('\u{1d507}', '\u{1d50a}'), ('\u{1d50d}',
+ '\u{1d514}'), ('\u{1d516}', '\u{1d51c}'), ('\u{1d51e}', '\u{1d539}'), ('\u{1d53b}',
+ '\u{1d53e}'), ('\u{1d540}', '\u{1d544}'), ('\u{1d546}', '\u{1d546}'), ('\u{1d54a}',
+ '\u{1d550}'), ('\u{1d552}', '\u{1d6a5}'), ('\u{1d6a8}', '\u{1d6c0}'), ('\u{1d6c2}',
+ '\u{1d6da}'), ('\u{1d6dc}', '\u{1d6fa}'), ('\u{1d6fc}', '\u{1d714}'), ('\u{1d716}',
+ '\u{1d734}'), ('\u{1d736}', '\u{1d74e}'), ('\u{1d750}', '\u{1d76e}'), ('\u{1d770}',
+ '\u{1d788}'), ('\u{1d78a}', '\u{1d7a8}'), ('\u{1d7aa}', '\u{1d7c2}'), ('\u{1d7c4}',
+ '\u{1d7cb}'), ('\u{1e800}', '\u{1e8c4}'), ('\u{1ee00}', '\u{1ee03}'), ('\u{1ee05}',
+ '\u{1ee1f}'), ('\u{1ee21}', '\u{1ee22}'), ('\u{1ee24}', '\u{1ee24}'), ('\u{1ee27}',
+ '\u{1ee27}'), ('\u{1ee29}', '\u{1ee32}'), ('\u{1ee34}', '\u{1ee37}'), ('\u{1ee39}',
+ '\u{1ee39}'), ('\u{1ee3b}', '\u{1ee3b}'), ('\u{1ee42}', '\u{1ee42}'), ('\u{1ee47}',
+ '\u{1ee47}'), ('\u{1ee49}', '\u{1ee49}'), ('\u{1ee4b}', '\u{1ee4b}'), ('\u{1ee4d}',
+ '\u{1ee4f}'), ('\u{1ee51}', '\u{1ee52}'), ('\u{1ee54}', '\u{1ee54}'), ('\u{1ee57}',
+ '\u{1ee57}'), ('\u{1ee59}', '\u{1ee59}'), ('\u{1ee5b}', '\u{1ee5b}'), ('\u{1ee5d}',
+ '\u{1ee5d}'), ('\u{1ee5f}', '\u{1ee5f}'), ('\u{1ee61}', '\u{1ee62}'), ('\u{1ee64}',
+ '\u{1ee64}'), ('\u{1ee67}', '\u{1ee6a}'), ('\u{1ee6c}', '\u{1ee72}'), ('\u{1ee74}',
+ '\u{1ee77}'), ('\u{1ee79}', '\u{1ee7c}'), ('\u{1ee7e}', '\u{1ee7e}'), ('\u{1ee80}',
+ '\u{1ee89}'), ('\u{1ee8b}', '\u{1ee9b}'), ('\u{1eea1}', '\u{1eea3}'), ('\u{1eea5}',
+ '\u{1eea9}'), ('\u{1eeab}', '\u{1eebb}'), ('\u{20000}', '\u{2a6d6}'), ('\u{2a700}',
+ '\u{2b734}'), ('\u{2b740}', '\u{2b81d}'), ('\u{2f800}', '\u{2fa1d}')
];
pub const LC_table: &'static [(char, char)] = &[
'\u{2dd6}'), ('\u{2dd8}', '\u{2dde}'), ('\u{3006}', '\u{3006}'), ('\u{303c}', '\u{303c}'),
('\u{3041}', '\u{3096}'), ('\u{309f}', '\u{309f}'), ('\u{30a1}', '\u{30fa}'), ('\u{30ff}',
'\u{30ff}'), ('\u{3105}', '\u{312d}'), ('\u{3131}', '\u{318e}'), ('\u{31a0}', '\u{31ba}'),
- ('\u{31f0}', '\u{31ff}'), ('\u{3400}', '\u{3400}'), ('\u{4db5}', '\u{4db5}'), ('\u{4e00}',
- '\u{4e00}'), ('\u{9fcc}', '\u{9fcc}'), ('\u{a000}', '\u{a014}'), ('\u{a016}', '\u{a48c}'),
- ('\u{a4d0}', '\u{a4f7}'), ('\u{a500}', '\u{a60b}'), ('\u{a610}', '\u{a61f}'), ('\u{a62a}',
- '\u{a62b}'), ('\u{a66e}', '\u{a66e}'), ('\u{a6a0}', '\u{a6e5}'), ('\u{a7f7}', '\u{a7f7}'),
- ('\u{a7fb}', '\u{a801}'), ('\u{a803}', '\u{a805}'), ('\u{a807}', '\u{a80a}'), ('\u{a80c}',
- '\u{a822}'), ('\u{a840}', '\u{a873}'), ('\u{a882}', '\u{a8b3}'), ('\u{a8f2}', '\u{a8f7}'),
- ('\u{a8fb}', '\u{a8fb}'), ('\u{a90a}', '\u{a925}'), ('\u{a930}', '\u{a946}'), ('\u{a960}',
- '\u{a97c}'), ('\u{a984}', '\u{a9b2}'), ('\u{a9e0}', '\u{a9e4}'), ('\u{a9e7}', '\u{a9ef}'),
- ('\u{a9fa}', '\u{a9fe}'), ('\u{aa00}', '\u{aa28}'), ('\u{aa40}', '\u{aa42}'), ('\u{aa44}',
- '\u{aa4b}'), ('\u{aa60}', '\u{aa6f}'), ('\u{aa71}', '\u{aa76}'), ('\u{aa7a}', '\u{aa7a}'),
- ('\u{aa7e}', '\u{aaaf}'), ('\u{aab1}', '\u{aab1}'), ('\u{aab5}', '\u{aab6}'), ('\u{aab9}',
- '\u{aabd}'), ('\u{aac0}', '\u{aac0}'), ('\u{aac2}', '\u{aac2}'), ('\u{aadb}', '\u{aadc}'),
- ('\u{aae0}', '\u{aaea}'), ('\u{aaf2}', '\u{aaf2}'), ('\u{ab01}', '\u{ab06}'), ('\u{ab09}',
- '\u{ab0e}'), ('\u{ab11}', '\u{ab16}'), ('\u{ab20}', '\u{ab26}'), ('\u{ab28}', '\u{ab2e}'),
- ('\u{abc0}', '\u{abe2}'), ('\u{ac00}', '\u{ac00}'), ('\u{d7a3}', '\u{d7a3}'), ('\u{d7b0}',
- '\u{d7c6}'), ('\u{d7cb}', '\u{d7fb}'), ('\u{f900}', '\u{fa6d}'), ('\u{fa70}', '\u{fad9}'),
- ('\u{fb1d}', '\u{fb1d}'), ('\u{fb1f}', '\u{fb28}'), ('\u{fb2a}', '\u{fb36}'), ('\u{fb38}',
- '\u{fb3c}'), ('\u{fb3e}', '\u{fb3e}'), ('\u{fb40}', '\u{fb41}'), ('\u{fb43}', '\u{fb44}'),
- ('\u{fb46}', '\u{fbb1}'), ('\u{fbd3}', '\u{fd3d}'), ('\u{fd50}', '\u{fd8f}'), ('\u{fd92}',
- '\u{fdc7}'), ('\u{fdf0}', '\u{fdfb}'), ('\u{fe70}', '\u{fe74}'), ('\u{fe76}', '\u{fefc}'),
- ('\u{ff66}', '\u{ff6f}'), ('\u{ff71}', '\u{ff9d}'), ('\u{ffa0}', '\u{ffbe}'), ('\u{ffc2}',
- '\u{ffc7}'), ('\u{ffca}', '\u{ffcf}'), ('\u{ffd2}', '\u{ffd7}'), ('\u{ffda}', '\u{ffdc}'),
- ('\u{10000}', '\u{1000b}'), ('\u{1000d}', '\u{10026}'), ('\u{10028}', '\u{1003a}'),
- ('\u{1003c}', '\u{1003d}'), ('\u{1003f}', '\u{1004d}'), ('\u{10050}', '\u{1005d}'),
- ('\u{10080}', '\u{100fa}'), ('\u{10280}', '\u{1029c}'), ('\u{102a0}', '\u{102d0}'),
- ('\u{10300}', '\u{1031f}'), ('\u{10330}', '\u{10340}'), ('\u{10342}', '\u{10349}'),
- ('\u{10350}', '\u{10375}'), ('\u{10380}', '\u{1039d}'), ('\u{103a0}', '\u{103c3}'),
- ('\u{103c8}', '\u{103cf}'), ('\u{10450}', '\u{1049d}'), ('\u{10500}', '\u{10527}'),
- ('\u{10530}', '\u{10563}'), ('\u{10600}', '\u{10736}'), ('\u{10740}', '\u{10755}'),
- ('\u{10760}', '\u{10767}'), ('\u{10800}', '\u{10805}'), ('\u{10808}', '\u{10808}'),
- ('\u{1080a}', '\u{10835}'), ('\u{10837}', '\u{10838}'), ('\u{1083c}', '\u{1083c}'),
- ('\u{1083f}', '\u{10855}'), ('\u{10860}', '\u{10876}'), ('\u{10880}', '\u{1089e}'),
- ('\u{10900}', '\u{10915}'), ('\u{10920}', '\u{10939}'), ('\u{10980}', '\u{109b7}'),
- ('\u{109be}', '\u{109bf}'), ('\u{10a00}', '\u{10a00}'), ('\u{10a10}', '\u{10a13}'),
- ('\u{10a15}', '\u{10a17}'), ('\u{10a19}', '\u{10a33}'), ('\u{10a60}', '\u{10a7c}'),
- ('\u{10a80}', '\u{10a9c}'), ('\u{10ac0}', '\u{10ac7}'), ('\u{10ac9}', '\u{10ae4}'),
- ('\u{10b00}', '\u{10b35}'), ('\u{10b40}', '\u{10b55}'), ('\u{10b60}', '\u{10b72}'),
- ('\u{10b80}', '\u{10b91}'), ('\u{10c00}', '\u{10c48}'), ('\u{11003}', '\u{11037}'),
- ('\u{11083}', '\u{110af}'), ('\u{110d0}', '\u{110e8}'), ('\u{11103}', '\u{11126}'),
- ('\u{11150}', '\u{11172}'), ('\u{11176}', '\u{11176}'), ('\u{11183}', '\u{111b2}'),
- ('\u{111c1}', '\u{111c4}'), ('\u{111da}', '\u{111da}'), ('\u{11200}', '\u{11211}'),
- ('\u{11213}', '\u{1122b}'), ('\u{112b0}', '\u{112de}'), ('\u{11305}', '\u{1130c}'),
- ('\u{1130f}', '\u{11310}'), ('\u{11313}', '\u{11328}'), ('\u{1132a}', '\u{11330}'),
- ('\u{11332}', '\u{11333}'), ('\u{11335}', '\u{11339}'), ('\u{1133d}', '\u{1133d}'),
- ('\u{1135d}', '\u{11361}'), ('\u{11480}', '\u{114af}'), ('\u{114c4}', '\u{114c5}'),
- ('\u{114c7}', '\u{114c7}'), ('\u{11580}', '\u{115ae}'), ('\u{11600}', '\u{1162f}'),
- ('\u{11644}', '\u{11644}'), ('\u{11680}', '\u{116aa}'), ('\u{118ff}', '\u{118ff}'),
- ('\u{11ac0}', '\u{11af8}'), ('\u{12000}', '\u{12398}'), ('\u{13000}', '\u{1342e}'),
- ('\u{16800}', '\u{16a38}'), ('\u{16a40}', '\u{16a5e}'), ('\u{16ad0}', '\u{16aed}'),
- ('\u{16b00}', '\u{16b2f}'), ('\u{16b63}', '\u{16b77}'), ('\u{16b7d}', '\u{16b8f}'),
- ('\u{16f00}', '\u{16f44}'), ('\u{16f50}', '\u{16f50}'), ('\u{1b000}', '\u{1b001}'),
- ('\u{1bc00}', '\u{1bc6a}'), ('\u{1bc70}', '\u{1bc7c}'), ('\u{1bc80}', '\u{1bc88}'),
- ('\u{1bc90}', '\u{1bc99}'), ('\u{1e800}', '\u{1e8c4}'), ('\u{1ee00}', '\u{1ee03}'),
- ('\u{1ee05}', '\u{1ee1f}'), ('\u{1ee21}', '\u{1ee22}'), ('\u{1ee24}', '\u{1ee24}'),
- ('\u{1ee27}', '\u{1ee27}'), ('\u{1ee29}', '\u{1ee32}'), ('\u{1ee34}', '\u{1ee37}'),
- ('\u{1ee39}', '\u{1ee39}'), ('\u{1ee3b}', '\u{1ee3b}'), ('\u{1ee42}', '\u{1ee42}'),
- ('\u{1ee47}', '\u{1ee47}'), ('\u{1ee49}', '\u{1ee49}'), ('\u{1ee4b}', '\u{1ee4b}'),
- ('\u{1ee4d}', '\u{1ee4f}'), ('\u{1ee51}', '\u{1ee52}'), ('\u{1ee54}', '\u{1ee54}'),
- ('\u{1ee57}', '\u{1ee57}'), ('\u{1ee59}', '\u{1ee59}'), ('\u{1ee5b}', '\u{1ee5b}'),
- ('\u{1ee5d}', '\u{1ee5d}'), ('\u{1ee5f}', '\u{1ee5f}'), ('\u{1ee61}', '\u{1ee62}'),
- ('\u{1ee64}', '\u{1ee64}'), ('\u{1ee67}', '\u{1ee6a}'), ('\u{1ee6c}', '\u{1ee72}'),
- ('\u{1ee74}', '\u{1ee77}'), ('\u{1ee79}', '\u{1ee7c}'), ('\u{1ee7e}', '\u{1ee7e}'),
- ('\u{1ee80}', '\u{1ee89}'), ('\u{1ee8b}', '\u{1ee9b}'), ('\u{1eea1}', '\u{1eea3}'),
- ('\u{1eea5}', '\u{1eea9}'), ('\u{1eeab}', '\u{1eebb}'), ('\u{20000}', '\u{20000}'),
- ('\u{2a6d6}', '\u{2a6d6}'), ('\u{2a700}', '\u{2a700}'), ('\u{2b734}', '\u{2b734}'),
- ('\u{2b740}', '\u{2b740}'), ('\u{2b81d}', '\u{2b81d}'), ('\u{2f800}', '\u{2fa1d}')
+ ('\u{31f0}', '\u{31ff}'), ('\u{3400}', '\u{4db5}'), ('\u{4e00}', '\u{9fcc}'), ('\u{a000}',
+ '\u{a014}'), ('\u{a016}', '\u{a48c}'), ('\u{a4d0}', '\u{a4f7}'), ('\u{a500}', '\u{a60b}'),
+ ('\u{a610}', '\u{a61f}'), ('\u{a62a}', '\u{a62b}'), ('\u{a66e}', '\u{a66e}'), ('\u{a6a0}',
+ '\u{a6e5}'), ('\u{a7f7}', '\u{a7f7}'), ('\u{a7fb}', '\u{a801}'), ('\u{a803}', '\u{a805}'),
+ ('\u{a807}', '\u{a80a}'), ('\u{a80c}', '\u{a822}'), ('\u{a840}', '\u{a873}'), ('\u{a882}',
+ '\u{a8b3}'), ('\u{a8f2}', '\u{a8f7}'), ('\u{a8fb}', '\u{a8fb}'), ('\u{a90a}', '\u{a925}'),
+ ('\u{a930}', '\u{a946}'), ('\u{a960}', '\u{a97c}'), ('\u{a984}', '\u{a9b2}'), ('\u{a9e0}',
+ '\u{a9e4}'), ('\u{a9e7}', '\u{a9ef}'), ('\u{a9fa}', '\u{a9fe}'), ('\u{aa00}', '\u{aa28}'),
+ ('\u{aa40}', '\u{aa42}'), ('\u{aa44}', '\u{aa4b}'), ('\u{aa60}', '\u{aa6f}'), ('\u{aa71}',
+ '\u{aa76}'), ('\u{aa7a}', '\u{aa7a}'), ('\u{aa7e}', '\u{aaaf}'), ('\u{aab1}', '\u{aab1}'),
+ ('\u{aab5}', '\u{aab6}'), ('\u{aab9}', '\u{aabd}'), ('\u{aac0}', '\u{aac0}'), ('\u{aac2}',
+ '\u{aac2}'), ('\u{aadb}', '\u{aadc}'), ('\u{aae0}', '\u{aaea}'), ('\u{aaf2}', '\u{aaf2}'),
+ ('\u{ab01}', '\u{ab06}'), ('\u{ab09}', '\u{ab0e}'), ('\u{ab11}', '\u{ab16}'), ('\u{ab20}',
+ '\u{ab26}'), ('\u{ab28}', '\u{ab2e}'), ('\u{abc0}', '\u{abe2}'), ('\u{ac00}', '\u{d7a3}'),
+ ('\u{d7b0}', '\u{d7c6}'), ('\u{d7cb}', '\u{d7fb}'), ('\u{f900}', '\u{fa6d}'), ('\u{fa70}',
+ '\u{fad9}'), ('\u{fb1d}', '\u{fb1d}'), ('\u{fb1f}', '\u{fb28}'), ('\u{fb2a}', '\u{fb36}'),
+ ('\u{fb38}', '\u{fb3c}'), ('\u{fb3e}', '\u{fb3e}'), ('\u{fb40}', '\u{fb41}'), ('\u{fb43}',
+ '\u{fb44}'), ('\u{fb46}', '\u{fbb1}'), ('\u{fbd3}', '\u{fd3d}'), ('\u{fd50}', '\u{fd8f}'),
+ ('\u{fd92}', '\u{fdc7}'), ('\u{fdf0}', '\u{fdfb}'), ('\u{fe70}', '\u{fe74}'), ('\u{fe76}',
+ '\u{fefc}'), ('\u{ff66}', '\u{ff6f}'), ('\u{ff71}', '\u{ff9d}'), ('\u{ffa0}', '\u{ffbe}'),
+ ('\u{ffc2}', '\u{ffc7}'), ('\u{ffca}', '\u{ffcf}'), ('\u{ffd2}', '\u{ffd7}'), ('\u{ffda}',
+ '\u{ffdc}'), ('\u{10000}', '\u{1000b}'), ('\u{1000d}', '\u{10026}'), ('\u{10028}',
+ '\u{1003a}'), ('\u{1003c}', '\u{1003d}'), ('\u{1003f}', '\u{1004d}'), ('\u{10050}',
+ '\u{1005d}'), ('\u{10080}', '\u{100fa}'), ('\u{10280}', '\u{1029c}'), ('\u{102a0}',
+ '\u{102d0}'), ('\u{10300}', '\u{1031f}'), ('\u{10330}', '\u{10340}'), ('\u{10342}',
+ '\u{10349}'), ('\u{10350}', '\u{10375}'), ('\u{10380}', '\u{1039d}'), ('\u{103a0}',
+ '\u{103c3}'), ('\u{103c8}', '\u{103cf}'), ('\u{10450}', '\u{1049d}'), ('\u{10500}',
+ '\u{10527}'), ('\u{10530}', '\u{10563}'), ('\u{10600}', '\u{10736}'), ('\u{10740}',
+ '\u{10755}'), ('\u{10760}', '\u{10767}'), ('\u{10800}', '\u{10805}'), ('\u{10808}',
+ '\u{10808}'), ('\u{1080a}', '\u{10835}'), ('\u{10837}', '\u{10838}'), ('\u{1083c}',
+ '\u{1083c}'), ('\u{1083f}', '\u{10855}'), ('\u{10860}', '\u{10876}'), ('\u{10880}',
+ '\u{1089e}'), ('\u{10900}', '\u{10915}'), ('\u{10920}', '\u{10939}'), ('\u{10980}',
+ '\u{109b7}'), ('\u{109be}', '\u{109bf}'), ('\u{10a00}', '\u{10a00}'), ('\u{10a10}',
+ '\u{10a13}'), ('\u{10a15}', '\u{10a17}'), ('\u{10a19}', '\u{10a33}'), ('\u{10a60}',
+ '\u{10a7c}'), ('\u{10a80}', '\u{10a9c}'), ('\u{10ac0}', '\u{10ac7}'), ('\u{10ac9}',
+ '\u{10ae4}'), ('\u{10b00}', '\u{10b35}'), ('\u{10b40}', '\u{10b55}'), ('\u{10b60}',
+ '\u{10b72}'), ('\u{10b80}', '\u{10b91}'), ('\u{10c00}', '\u{10c48}'), ('\u{11003}',
+ '\u{11037}'), ('\u{11083}', '\u{110af}'), ('\u{110d0}', '\u{110e8}'), ('\u{11103}',
+ '\u{11126}'), ('\u{11150}', '\u{11172}'), ('\u{11176}', '\u{11176}'), ('\u{11183}',
+ '\u{111b2}'), ('\u{111c1}', '\u{111c4}'), ('\u{111da}', '\u{111da}'), ('\u{11200}',
+ '\u{11211}'), ('\u{11213}', '\u{1122b}'), ('\u{112b0}', '\u{112de}'), ('\u{11305}',
+ '\u{1130c}'), ('\u{1130f}', '\u{11310}'), ('\u{11313}', '\u{11328}'), ('\u{1132a}',
+ '\u{11330}'), ('\u{11332}', '\u{11333}'), ('\u{11335}', '\u{11339}'), ('\u{1133d}',
+ '\u{1133d}'), ('\u{1135d}', '\u{11361}'), ('\u{11480}', '\u{114af}'), ('\u{114c4}',
+ '\u{114c5}'), ('\u{114c7}', '\u{114c7}'), ('\u{11580}', '\u{115ae}'), ('\u{11600}',
+ '\u{1162f}'), ('\u{11644}', '\u{11644}'), ('\u{11680}', '\u{116aa}'), ('\u{118ff}',
+ '\u{118ff}'), ('\u{11ac0}', '\u{11af8}'), ('\u{12000}', '\u{12398}'), ('\u{13000}',
+ '\u{1342e}'), ('\u{16800}', '\u{16a38}'), ('\u{16a40}', '\u{16a5e}'), ('\u{16ad0}',
+ '\u{16aed}'), ('\u{16b00}', '\u{16b2f}'), ('\u{16b63}', '\u{16b77}'), ('\u{16b7d}',
+ '\u{16b8f}'), ('\u{16f00}', '\u{16f44}'), ('\u{16f50}', '\u{16f50}'), ('\u{1b000}',
+ '\u{1b001}'), ('\u{1bc00}', '\u{1bc6a}'), ('\u{1bc70}', '\u{1bc7c}'), ('\u{1bc80}',
+ '\u{1bc88}'), ('\u{1bc90}', '\u{1bc99}'), ('\u{1e800}', '\u{1e8c4}'), ('\u{1ee00}',
+ '\u{1ee03}'), ('\u{1ee05}', '\u{1ee1f}'), ('\u{1ee21}', '\u{1ee22}'), ('\u{1ee24}',
+ '\u{1ee24}'), ('\u{1ee27}', '\u{1ee27}'), ('\u{1ee29}', '\u{1ee32}'), ('\u{1ee34}',
+ '\u{1ee37}'), ('\u{1ee39}', '\u{1ee39}'), ('\u{1ee3b}', '\u{1ee3b}'), ('\u{1ee42}',
+ '\u{1ee42}'), ('\u{1ee47}', '\u{1ee47}'), ('\u{1ee49}', '\u{1ee49}'), ('\u{1ee4b}',
+ '\u{1ee4b}'), ('\u{1ee4d}', '\u{1ee4f}'), ('\u{1ee51}', '\u{1ee52}'), ('\u{1ee54}',
+ '\u{1ee54}'), ('\u{1ee57}', '\u{1ee57}'), ('\u{1ee59}', '\u{1ee59}'), ('\u{1ee5b}',
+ '\u{1ee5b}'), ('\u{1ee5d}', '\u{1ee5d}'), ('\u{1ee5f}', '\u{1ee5f}'), ('\u{1ee61}',
+ '\u{1ee62}'), ('\u{1ee64}', '\u{1ee64}'), ('\u{1ee67}', '\u{1ee6a}'), ('\u{1ee6c}',
+ '\u{1ee72}'), ('\u{1ee74}', '\u{1ee77}'), ('\u{1ee79}', '\u{1ee7c}'), ('\u{1ee7e}',
+ '\u{1ee7e}'), ('\u{1ee80}', '\u{1ee89}'), ('\u{1ee8b}', '\u{1ee9b}'), ('\u{1eea1}',
+ '\u{1eea3}'), ('\u{1eea5}', '\u{1eea9}'), ('\u{1eeab}', '\u{1eebb}'), ('\u{20000}',
+ '\u{2a6d6}'), ('\u{2a700}', '\u{2b734}'), ('\u{2b740}', '\u{2b81d}'), ('\u{2f800}',
+ '\u{2fa1d}')
];
pub const Lt_table: &'static [(char, char)] = &[
return Some(tmp);
}
- let mut buf = [0u16; 2];
+ let mut buf = [0; 2];
self.chars.next().map(|ch| {
let n = CharExt::encode_utf16(ch, &mut buf).unwrap_or(0);
if n == 2 { self.extra = buf[1]; }
-Subproject commit b89c3f039b61edbb077771eda2ee8a718dbec7e0
+Subproject commit bff69076975642c64e76dbeaa53476bfa7212086
//! Basic data structures for representing a book.
-use std::old_io::BufferedReader;
+use std::io::prelude::*;
+use std::io::BufReader;
use std::iter;
use std::iter::AdditiveIterator;
+use std::path::{Path, PathBuf};
pub struct BookItem {
pub title: String,
- pub path: Path,
- pub path_to_root: Path,
+ pub path: PathBuf,
+ pub path_to_root: PathBuf,
pub children: Vec<BookItem>,
}
}
/// Construct a book by parsing a summary (markdown table of contents).
-pub fn parse_summary<R: Reader>(input: R, src: &Path) -> Result<Book, Vec<String>> {
+pub fn parse_summary(input: &mut Read, src: &Path) -> Result<Book, Vec<String>> {
fn collapse(stack: &mut Vec<BookItem>,
top_items: &mut Vec<BookItem>,
to_level: usize) {
// always include the introduction
top_items.push(BookItem {
title: "Introduction".to_string(),
- path: Path::new("README.md"),
- path_to_root: Path::new("."),
+ path: PathBuf::new("README.md"),
+ path_to_root: PathBuf::new("."),
children: vec!(),
});
- for line_result in BufferedReader::new(input).lines() {
+ for line_result in BufReader::new(input).lines() {
let line = match line_result {
Ok(line) => line,
Err(err) => {
- errors.push(err.desc.to_string()); // FIXME: include detail
+ errors.push(err.to_string());
return Err(errors);
}
};
let title = line[start_bracket + 1..end_bracket].to_string();
let indent = &line[..star_idx];
- let path_from_root = match src.join(given_path).path_relative_from(src) {
- Some(p) => p,
+ let path_from_root = match src.join(given_path).relative_from(src) {
+ Some(p) => p.to_path_buf(),
None => {
errors.push(format!("paths in SUMMARY.md must be relative, \
but path '{}' for section '{}' is not.",
given_path, title));
- Path::new("")
+ PathBuf::new("")
}
};
- let path_to_root = Path::new(iter::repeat("../")
+ let path_to_root = PathBuf::new(&iter::repeat("../")
.take(path_from_root.components().count() - 1)
.collect::<String>());
let item = BookItem {
//! Implementation of the `build` subcommand, used to compile a book.
use std::env;
-use std::os;
-use std::old_io;
-use std::old_io::{fs, File, BufferedWriter, TempDir, IoResult};
+use std::fs::{self, File, TempDir};
+use std::io::prelude::*;
+use std::io::{self, BufWriter};
+use std::path::{Path, PathBuf};
use subcommand::Subcommand;
use term::Term;
-use error::{Error, CliResult, CommandResult};
+use error::{err, CliResult, CommandResult};
use book;
use book::{Book, BookItem};
use css;
pub fn parse_cmd(name: &str) -> Option<Box<Subcommand>> {
if name == "build" {
- Some(box Build as Box<Subcommand>)
+ Some(Box::new(Build))
} else {
None
}
}
-fn write_toc(book: &Book, path_to_root: &Path, out: &mut Writer) -> IoResult<()> {
+fn write_toc(book: &Book, path_to_root: &Path, out: &mut Write) -> io::Result<()> {
fn walk_items(items: &[BookItem],
section: &str,
path_to_root: &Path,
- out: &mut Writer) -> IoResult<()> {
+ out: &mut Write) -> io::Result<()> {
for (i, item) in items.iter().enumerate() {
try!(walk_item(item, &format!("{}{}.", section, i + 1)[..], path_to_root, out));
}
fn walk_item(item: &BookItem,
section: &str,
path_to_root: &Path,
- out: &mut Writer) -> IoResult<()> {
+ out: &mut Write) -> io::Result<()> {
try!(writeln!(out, "<li><a href='{}'><b>{}</b> {}</a>",
- path_to_root.join(item.path.with_extension("html")).display(),
+ path_to_root.join(&item.path.with_extension("html")).display(),
section,
item.title));
if !item.children.is_empty() {
fn render(book: &Book, tgt: &Path) -> CliResult<()> {
let tmp = try!(TempDir::new("rust-book"));
- for (section, item) in book.iter() {
- println!("{} {}", section, item.title);
-
- let out_path = tgt.join(item.path.dirname());
+ for (_section, item) in book.iter() {
+ let out_path = match item.path.parent() {
+ Some(p) => tgt.join(p),
+ None => tgt.to_path_buf(),
+ };
let src;
if env::args().len() < 3 {
- src = os::getcwd().unwrap().clone();
+ src = env::current_dir().unwrap().clone();
} else {
- src = Path::new(env::args().nth(2).unwrap().clone());
+ src = PathBuf::new(&env::args().nth(2).unwrap());
}
// preprocess the markdown, rerouting markdown references to html references
- let markdown_data = try!(File::open(&src.join(&item.path)).read_to_string());
- let preprocessed_path = tmp.path().join(item.path.filename().unwrap());
+ let mut markdown_data = String::new();
+ try!(File::open(&src.join(&item.path)).and_then(|mut f| {
+ f.read_to_string(&mut markdown_data)
+ }));
+ let preprocessed_path = tmp.path().join(item.path.file_name().unwrap());
{
let urls = markdown_data.replace(".md)", ".html)");
- try!(File::create(&preprocessed_path)
- .write_str(&urls[..]));
+ try!(File::create(&preprocessed_path).and_then(|mut f| {
+ f.write_all(urls.as_bytes())
+ }));
}
// write the prelude to a temporary HTML file for rustdoc inclusion
let prelude = tmp.path().join("prelude.html");
{
- let mut toc = BufferedWriter::new(try!(File::create(&prelude)));
+ let mut toc = BufWriter::new(try!(File::create(&prelude)));
try!(writeln!(&mut toc, r#"<div id="nav">
<button id="toggle-nav">
<span class="sr-only">Toggle navigation</span>
// write the postlude to a temporary HTML file for rustdoc inclusion
let postlude = tmp.path().join("postlude.html");
{
- let mut toc = BufferedWriter::new(try!(File::create(&postlude)));
- try!(toc.write_str(javascript::JAVASCRIPT));
+ let mut toc = BufWriter::new(try!(File::create(&postlude)));
+ try!(toc.write_all(javascript::JAVASCRIPT.as_bytes()));
try!(writeln!(&mut toc, "</div></div>"));
}
- try!(fs::mkdir_recursive(&out_path, old_io::USER_DIR));
+ try!(fs::create_dir_all(&out_path));
let rustdoc_args: &[String] = &[
"".to_string(),
if output_result != 0 {
let message = format!("Could not execute `rustdoc` with {:?}: {}",
rustdoc_args, output_result);
- return Err(box message as Box<Error>);
+ return Err(err(&message));
}
}
}
fn usage(&self) {}
fn execute(&mut self, term: &mut Term) -> CommandResult<()> {
- let cwd = os::getcwd().unwrap();
+ let cwd = env::current_dir().unwrap();
let src;
let tgt;
if env::args().len() < 3 {
src = cwd.clone();
} else {
- src = Path::new(env::args().nth(2).unwrap().clone());
+ src = PathBuf::new(&env::args().nth(2).unwrap());
}
if env::args().len() < 4 {
tgt = cwd.join("_book");
} else {
- tgt = Path::new(env::args().nth(3).unwrap().clone());
+ tgt = PathBuf::new(&env::args().nth(3).unwrap());
}
- try!(fs::mkdir(&tgt, old_io::USER_DIR));
+ try!(fs::create_dir(&tgt));
- try!(File::create(&tgt.join("rust-book.css")).write_str(css::STYLE));
+ try!(File::create(&tgt.join("rust-book.css")).and_then(|mut f| {
+ f.write_all(css::STYLE.as_bytes())
+ }));
- let summary = try!(File::open(&src.join("SUMMARY.md")));
- match book::parse_summary(summary, &src) {
+ let mut summary = try!(File::open(&src.join("SUMMARY.md")));
+ match book::parse_summary(&mut summary, &src) {
Ok(book) => {
// execute rustdoc on the whole book
render(&book, &tgt)
term.err(&format!("error: {}", err)[..]);
}
- Err(box format!("{} errors occurred", n) as Box<Error>)
+ Err(err(&format!("{} errors occurred", n)))
}
}
}
//! Error handling utilities. WIP.
+use std::error::Error;
use std::fmt;
-use std::fmt::{Debug, Formatter};
-
-use std::old_io::IoError;
pub type CliError = Box<Error + 'static>;
pub type CliResult<T> = Result<T, CliError>;
pub type CommandError = Box<Error + 'static>;
pub type CommandResult<T> = Result<T, CommandError>;
-pub trait Error {
- fn description(&self) -> &str;
-
- fn detail(&self) -> Option<&str> { None }
- fn cause(&self) -> Option<&Error> { None }
-}
-
-pub trait FromError<E> {
- fn from_err(err: E) -> Self;
-}
-
-impl Debug for Box<Error + 'static> {
- fn fmt(&self, f: &mut Formatter) -> fmt::Result {
- write!(f, "{}", self.description())
- }
-}
-
-impl<E: Error + 'static> FromError<E> for Box<Error + 'static> {
- fn from_err(err: E) -> Box<Error + 'static> {
- box err as Box<Error>
- }
-}
+pub fn err(s: &str) -> CliError {
+ struct E(String);
-impl<'a> Error for &'a str {
- fn description<'b>(&'b self) -> &'b str {
- *self
+ impl Error for E {
+ fn description(&self) -> &str { &self.0 }
}
-}
-
-impl Error for String {
- fn description<'a>(&'a self) -> &'a str {
- &self[..]
+ impl fmt::Display for E {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ self.0.fmt(f)
+ }
}
-}
-
-impl<'a> Error for Box<Error + 'a> {
- fn description(&self) -> &str { (**self).description() }
- fn detail(&self) -> Option<&str> { (**self).detail() }
- fn cause(&self) -> Option<&Error> { (**self).cause() }
-}
-
-impl FromError<()> for () {
- fn from_err(_: ()) -> () { () }
-}
-impl FromError<IoError> for IoError {
- fn from_err(error: IoError) -> IoError { error }
+ Box::new(E(s.to_string()))
}
-
-impl Error for IoError {
- fn description(&self) -> &str {
- self.desc
- }
- fn detail(&self) -> Option<&str> {
- self.detail.as_ref().map(|s| &s[..])
- }
-}
-
-
-//fn iter_map_err<T, U, E, I: Iterator<Result<T,E>>>(iter: I,
pub fn parse_cmd(name: &str) -> Option<Box<Subcommand>> {
match name {
- "help" | "--help" | "-h" | "-?" => Some(box Help as Box<Subcommand>),
+ "help" | "--help" | "-h" | "-?" => Some(Box::new(Help)),
_ => None
}
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-#![feature(box_syntax)]
-#![feature(collections)]
+#![deny(warnings)]
+
#![feature(core)]
+#![feature(exit_status)]
+#![feature(fs)]
+#![feature(io)]
#![feature(old_io)]
-#![feature(env)]
-#![feature(os)]
-#![feature(old_path)]
+#![feature(path)]
#![feature(rustdoc)]
+#![feature(tempdir)]
extern crate rustdoc;
use std::env;
+use std::error::Error;
use subcommand::Subcommand;
use term::Term;
-macro_rules! try (
- ($expr:expr) => ({
- use error;
- match $expr {
- Ok(val) => val,
- Err(err) => return Err(error::FromError::from_err(err))
- }
- })
-);
-
mod term;
mod error;
mod book;
} else {
match subcommand::parse_name(&cmd[1][..]) {
Some(mut subcmd) => {
- match subcmd.parse_args(cmd.tail()) {
+ match subcmd.parse_args(&cmd[..cmd.len()-1]) {
Ok(_) => {
match subcmd.execute(&mut term) {
Ok(_) => (),
Err(err) => {
- term.err(&format!("error: {}", err.description())[..]);
- err.detail().map(|detail| {
- term.err(&format!("detail: {}", detail)[..]);
- });
+ term.err(&format!("error: {}", err));
}
}
}
pub fn parse_cmd(name: &str) -> Option<Box<Subcommand>> {
if name == "serve" {
- Some(box Serve as Box<Subcommand>)
+ Some(Box::new(Serve))
} else {
None
}
impl Term {
pub fn new() -> Term {
Term {
- err: box stdio::stderr() as Box<Writer>,
+ err: Box::new(stdio::stderr())
}
}
//! Implementation of the `test` subcommand. Just a stub for now.
use subcommand::Subcommand;
-use error::CliResult;
-use error::CommandResult;
-use error::Error;
+use error::{err, CliResult, CommandResult};
use term::Term;
use book;
-use std::old_io::{Command, File};
-use std::os;
+
+use std::fs::File;
+use std::env;
+use std::process::Command;
struct Test;
pub fn parse_cmd(name: &str) -> Option<Box<Subcommand>> {
if name == "test" {
- Some(box Test as Box<Subcommand>)
+ Some(Box::new(Test))
} else {
None
}
}
fn usage(&self) {}
fn execute(&mut self, term: &mut Term) -> CommandResult<()> {
- let cwd = os::getcwd().unwrap();
+ let cwd = env::current_dir().unwrap();
let src = cwd.clone();
- let summary = File::open(&src.join("SUMMARY.md"));
- match book::parse_summary(summary, &src) {
+ let mut summary = try!(File::open(&src.join("SUMMARY.md")));
+ match book::parse_summary(&mut summary, &src) {
Ok(book) => {
for (_, item) in book.iter() {
let output_result = Command::new("rustdoc")
Ok(output) => {
if !output.status.success() {
term.err(&format!("{}\n{}",
- String::from_utf8_lossy(&output.output[..]),
- String::from_utf8_lossy(&output.error[..]))[..]);
- return Err(box "Some tests failed." as Box<Error>);
+ String::from_utf8_lossy(&output.stdout),
+ String::from_utf8_lossy(&output.stderr)));
+ return Err(err("some tests failed"));
}
}
Err(e) => {
- let message = format!("Could not execute `rustdoc`: {}", e);
- return Err(box message as Box<Error>);
+ let message = format!("could not execute `rustdoc`: {}", e);
+ return Err(err(&message))
}
}
}
for err in errors {
term.err(&err[..]);
}
- return Err(box "There was an error." as Box<Error>);
+ return Err(err("there was an error"))
}
}
Ok(()) // lol
# If this file is modified, then llvm will be forcibly cleaned and then rebuilt.
# The actual contents of this file do not matter, but to trigger a change on the
# build bots then the contents should be changed so git updates the mtime.
-2015-02-19
+2015-03-04
S 2015-02-25 880fb89
+ bitrig-x86_64 8cdc4ca0a80103100f46cbf8caa9fe497df048c5
freebsd-x86_64 f4cbe4227739de986444211f8ee8d74745ab8f7f
linux-i386 3278ebbce8cb269acc0614dac5ddac07eab6a99c
linux-x86_64 72287d0d88de3e5a53bae78ac0d958e1a7637d73
}
impl cat {
- pub fn speak(&mut self) { self.meows += 1_usize; }
+ pub fn speak(&mut self) { self.meows += 1; }
pub fn meow_count(&mut self) -> uint { self.meows }
}
impl cat {
pub fn meow(&mut self) {
println!("Meow");
- self.meows += 1_usize;
- if self.meows % 5_usize == 0_usize {
+ self.meows += 1;
+ if self.meows % 5 == 0 {
self.how_hungry += 1;
}
}
impl cat {
fn meow(&mut self) {
println!("Meow");
- self.meows += 1_usize;
- if self.meows % 5_usize == 0_usize {
+ self.meows += 1;
+ if self.meows % 5 == 0 {
self.how_hungry += 1;
}
}
let mut i = *self;
while i < v {
f(i);
- i += 1_usize;
+ i += 1;
}
}
}
#[inline]
pub fn iter<T, F>(v: &[T], mut f: F) where F: FnMut(&T) {
- let mut i = 0_usize;
+ let mut i = 0;
let n = v.len();
while i < n {
f(&v[i]);
- i += 1_usize;
+ i += 1;
}
}
// same as cci_iter_lib, more-or-less, but not marked inline
pub fn iter<F>(v: Vec<uint> , mut f: F) where F: FnMut(uint) {
- let mut i = 0_usize;
+ let mut i = 0;
let n = v.len();
while i < n {
f(v[i]);
- i += 1_usize;
+ i += 1;
}
}
--- /dev/null
+// Copyright 2013-2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Auxiliary crate for cross-crate debuginfo tests: built as an rlib with
+// debuginfo enabled (-g) so a companion test can step into the
+// monomorphized instances of `generic_function` and inspect its locals.
+
+#![crate_type = "rlib"]
+#![omit_gdb_pretty_printer_section]
+
+// no-prefer-dynamic
+// compile-flags:-g
+
+// `a_variable` and `another_variable` are never read by the program;
+// they exist only to give the debugger concrete values to print while
+// stopped inside this function.
+pub fn generic_function<T: Clone>(val: T) -> (T, T) {
+ let result = (val.clone(), val.clone());
+ let a_variable: u32 = 123456789;
+ let another_variable: f64 = 123456789.5;
+ zzz();
+ result
+}
+
+// NOTE(review): `#[inline(never)]` keeps `zzz` as a real symbol --
+// presumably the companion test sets a breakpoint on it; confirm against
+// the test that consumes this aux crate.
+#[inline(never)]
+fn zzz() {()}
\ No newline at end of file
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Auxiliary crate for the `allow_internal_unstable` tests: it exposes an
+// unstable function and a stable struct with an unstable field, plus
+// pairs of exported macros referencing them. One macro of each pair
+// carries `#[allow_internal_unstable]` (its expansion may use unstable
+// items); the `*_noallow` twin does not, so its expansion should trip
+// stability checking in the consuming crate.
+
+#![feature(staged_api, allow_internal_unstable)]
+#![staged_api]
+#![stable(feature = "stable", since = "1.0.0")]
+
+#[unstable(feature = "function")]
+pub fn unstable() {}
+
+
+#[stable(feature = "stable", since = "1.0.0")]
+pub struct Foo {
+ #[unstable(feature = "struct_field")]
+ pub x: u8
+}
+
+#[allow_internal_unstable]
+#[macro_export]
+macro_rules! call_unstable_allow {
+ () => { $crate::unstable() }
+}
+
+#[allow_internal_unstable]
+#[macro_export]
+macro_rules! construct_unstable_allow {
+ ($e: expr) => {
+ $crate::Foo { x: $e }
+ }
+}
+
+#[allow_internal_unstable]
+#[macro_export]
+macro_rules! pass_through_allow {
+ ($e: expr) => { $e }
+}
+
+#[macro_export]
+macro_rules! call_unstable_noallow {
+ () => { $crate::unstable() }
+}
+
+#[macro_export]
+macro_rules! construct_unstable_noallow {
+ ($e: expr) => {
+ $crate::Foo { x: $e }
+ }
+}
+
+#[macro_export]
+macro_rules! pass_through_noallow {
+ ($e: expr) => { $e }
+}
#![crate_type = "dylib"]
#[macro_export]
macro_rules! reexported {
- () => ( 3_usize )
+ () => ( 3 )
}
let args = reg.args().clone();
reg.register_syntax_extension(token::intern("plugin_args"),
// FIXME (#22405): Replace `Box::new` with `box` here when/if possible.
- NormalTT(Box::new(Expander { args: args, }), None));
+ NormalTT(Box::new(Expander { args: args, }), None, false));
}
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// force-host
+
+// Syntax-extension plugin that registers a `matches!` macro taking
+// `<expr>, <pat>|<pat>|...` and expanding to
+// `match <expr> { <pats> => true, _ => false }`. It exercises
+// `quote_matcher!` and the MBE macro parser from plugin code.
+
+#![crate_type="dylib"]
+#![feature(plugin_registrar, quote)]
+
+extern crate syntax;
+extern crate rustc;
+
+use syntax::codemap::Span;
+use syntax::parse::token::{self, str_to_ident, NtExpr, NtPat};
+use syntax::ast::{TokenTree, TtToken, Pat};
+use syntax::ext::base::{ExtCtxt, MacResult, DummyResult, MacEager};
+use syntax::ext::build::AstBuilder;
+use syntax::ext::tt::macro_parser::{MatchedSeq, MatchedNonterminal};
+use syntax::ext::tt::macro_parser::{Success, Failure, Error};
+use syntax::ptr::P;
+use rustc::plugin::Registry;
+
+// Parse the macro arguments against a quoted MBE matcher, extract the
+// matched expression (`$matched:expr`) and the `|`-separated pattern
+// sequence (`$($pat:pat)|+`), then build the boolean `match` expression.
+// The `unreachable!()` arms cover nonterminal kinds the matcher cannot
+// produce for these fragment specifiers.
+fn expand_mbe_matches(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
+ -> Box<MacResult + 'static> {
+
+ let mbe_matcher = quote_matcher!(cx, $matched:expr, $($pat:pat)|+);
+
+ let mac_expr = match TokenTree::parse(cx, &mbe_matcher[..], args) {
+ Success(map) => {
+ match (&*map[str_to_ident("matched")], &*map[str_to_ident("pat")]) {
+ (&MatchedNonterminal(NtExpr(ref matched_expr)),
+ &MatchedSeq(ref pats, seq_sp)) => {
+ let pats: Vec<P<Pat>> = pats.iter().map(|pat_nt|
+ if let &MatchedNonterminal(NtPat(ref pat)) = &**pat_nt {
+ pat.clone()
+ } else {
+ unreachable!()
+ }
+ ).collect();
+ let arm = cx.arm(seq_sp, pats, cx.expr_bool(seq_sp, true));
+
+ quote_expr!(cx,
+ match $matched_expr {
+ $arm
+ _ => false
+ }
+ )
+ }
+ _ => unreachable!()
+ }
+ }
+ Failure(_, s) | Error(_, s) => {
+ panic!("expected Success, but got Error/Failure: {}", s);
+ }
+ };
+
+ MacEager::expr(mac_expr)
+}
+
+#[plugin_registrar]
+pub fn plugin_registrar(reg: &mut Registry) {
+ reg.register_macro("matches", expand_mbe_matches);
+}
};
let mut text = &*text;
- let mut total = 0_usize;
+ let mut total = 0;
while !text.is_empty() {
match NUMERALS.iter().find(|&&(rn, _)| text.starts_with(rn)) {
Some(&(rn, val)) => {
#[inline]
pub fn has_closures() -> uint {
- let x = 1_usize;
+ let x = 1;
let mut f = move || x;
- let y = 1_usize;
+ let y = 1;
let g = || y;
f() + g()
}
*x = random_gradient(&mut rng);
}
- let mut permutations = [0i32; 256];
+ let mut permutations = [0; 256];
for (i, x) in permutations.iter_mut().enumerate() {
*x = i as i32;
}
to_rendezvous: Sender<CreatureInfo>,
to_rendezvous_log: Sender<String>
) {
- let mut creatures_met = 0i32;
+ let mut creatures_met = 0;
let mut evil_clones_met = 0;
let mut rendezvous = from_rendezvous.iter();
}
fn get(&mut self, mut idx: i32) -> P {
- let mut pp = [0u8; 16];
+ let mut pp = [0; 16];
self.permcount = idx as u32;
for (i, place) in self.perm.p.iter_mut().enumerate() {
*place = i as i32 + 1;
let n = std::env::args()
.nth(1)
.and_then(|arg| arg.parse().ok())
- .unwrap_or(2i32);
+ .unwrap_or(2);
let (checksum, maxflips) = fannkuch(n);
println!("{}\nPfannkuchen({}) = {}", checksum, n, maxflips);
fn make(&mut self, n: usize) -> IoResult<()> {
let alu_len = self.alu.len();
- let mut buf = repeat(0u8).take(alu_len + LINE_LEN).collect::<Vec<_>>();
+ let mut buf = repeat(0).take(alu_len + LINE_LEN).collect::<Vec<_>>();
let alu: &[u8] = self.alu.as_bytes();
copy_memory(&mut buf, alu);
-> std::old_io::IoResult<()>
{
try!(wr.write(header.as_bytes()));
- let mut line = [0u8; LINE_LENGTH + 1];
+ let mut line = [0; LINE_LENGTH + 1];
while n > 0 {
let nb = min(LINE_LENGTH, n);
for i in 0..nb {
}
fn rotate(&self, c: u8, frame: usize) -> Code {
- Code(self.push_char(c).hash() & ((1u64 << (2 * frame)) - 1))
+ Code(self.push_char(c).hash() & ((1 << (2 * frame)) - 1))
}
fn pack(string: &str) -> Code {
- string.bytes().fold(Code(0u64), |a, b| a.push_char(b))
+ string.bytes().fold(Code(0), |a, b| a.push_char(b))
}
fn unpack(&self, frame: usize) -> String {
.map(|(id, p)| transform(p, id != 3))
.collect();
- (0i32..50).map(|yx| {
+ (0..50).map(|yx| {
transforms.iter().enumerate().map(|(id, t)| {
t.iter().filter_map(|p| mask(yx / 5, yx % 5, id, p)).collect()
}).collect()
// Gets the identifier of a mask.
fn get_id(m: u64) -> u8 {
- for id in 0u8..10 {
+ for id in 0..10 {
if m & (1 << (id + 50) as usize) != 0 {return id;}
}
panic!("{:016x} does not have a valid identifier", m);
reader.read_line(&mut s).unwrap();
assert_eq!(s, "9,9\n");
- let mut g = repeat(vec![0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8])
+ let mut g = repeat(vec![0, 0, 0, 0, 0, 0, 0, 0, 0])
.take(10).collect::<Vec<_>>();
for line in reader.lines() {
let line = line.unwrap();
// solve sudoku grid
pub fn solve(&mut self) {
let mut work: Vec<(u8, u8)> = Vec::new(); /* queue of uncolored fields */
- for row in 0u8..9u8 {
- for col in 0u8..9u8 {
+ for row in 0..9 {
+ for col in 0..9 {
let color = self.grid[row as usize][col as usize];
- if color == 0u8 {
+ if color == 0 {
work.push((row, col));
}
}
}
fn next_color(&mut self, row: u8, col: u8, start_color: u8) -> bool {
- if start_color < 10u8 {
+ if start_color < 10 {
// colors not yet used
let mut avail: Box<_> = box Colors::new(start_color);
// find first remaining color that is available
let next = avail.next();
self.grid[row as usize][col as usize] = next;
- return 0u8 != next;
+ return 0 != next;
}
- self.grid[row as usize][col as usize] = 0u8;
+ self.grid[row as usize][col as usize] = 0;
return false;
}
// find colors available in neighbourhood of (row, col)
fn drop_colors(&mut self, avail: &mut Colors, row: u8, col: u8) {
- for idx in 0u8..9u8 {
+ for idx in 0..9 {
/* check same column fields */
avail.remove(self.grid[idx as usize][col as usize]);
/* check same row fields */
}
// check same block fields
- let row0 = (row / 3u8) * 3u8;
- let col0 = (col / 3u8) * 3u8;
- for alt_row in row0..row0 + 3u8 {
- for alt_col in col0..col0 + 3u8 {
+ let row0 = (row / 3) * 3;
+ let col0 = (col / 3) * 3;
+ for alt_row in row0..row0 + 3 {
+ for alt_col in col0..col0 + 3 {
avail.remove(self.grid[alt_row as usize][alt_col as usize]);
}
}
// Stores available colors as simple bitfield, bit 0 is always unset
struct Colors(u16);
-static HEADS: u16 = (1u16 << 10) - 1; /* bits 9..0 */
+static HEADS: u16 = (1 << 10) - 1; /* bits 9..0 */
impl Colors {
fn new(start_color: u8) -> Colors {
// Sets bits 9..start_color
- let tails = !0u16 << start_color as usize;
+ let tails = !0 << start_color as usize;
return Colors(HEADS & tails);
}
fn next(&self) -> u8 {
let Colors(c) = *self;
let val = c & HEADS;
- if 0u16 == val {
- return 0u8;
+ if 0 == val {
+ return 0;
} else {
return val.trailing_zeros() as u8
}
}
fn remove(&mut self, color: u8) {
- if color != 0u8 {
+ if color != 0 {
let Colors(val) = *self;
- let mask = !(1u16 << color as usize);
+ let mask = !(1 << color as usize);
*self = Colors(val & mask);
}
}
static DEFAULT_SUDOKU: [[u8;9];9] = [
/* 0 1 2 3 4 5 6 7 8 */
- /* 0 */ [0u8, 4u8, 0u8, 6u8, 0u8, 0u8, 0u8, 3u8, 2u8],
- /* 1 */ [0u8, 0u8, 8u8, 0u8, 2u8, 0u8, 0u8, 0u8, 0u8],
- /* 2 */ [7u8, 0u8, 0u8, 8u8, 0u8, 0u8, 0u8, 0u8, 0u8],
- /* 3 */ [0u8, 0u8, 0u8, 5u8, 0u8, 0u8, 0u8, 0u8, 0u8],
- /* 4 */ [0u8, 5u8, 0u8, 0u8, 0u8, 3u8, 6u8, 0u8, 0u8],
- /* 5 */ [6u8, 8u8, 0u8, 0u8, 0u8, 0u8, 0u8, 9u8, 0u8],
- /* 6 */ [0u8, 9u8, 5u8, 0u8, 0u8, 6u8, 0u8, 7u8, 0u8],
- /* 7 */ [0u8, 0u8, 0u8, 0u8, 4u8, 0u8, 0u8, 6u8, 0u8],
- /* 8 */ [4u8, 0u8, 0u8, 0u8, 0u8, 7u8, 2u8, 0u8, 3u8]
+ /* 0 */ [0, 4, 0, 6, 0, 0, 0, 3, 2],
+ /* 1 */ [0, 0, 8, 0, 2, 0, 0, 0, 0],
+ /* 2 */ [7, 0, 0, 8, 0, 0, 0, 0, 0],
+ /* 3 */ [0, 0, 0, 5, 0, 0, 0, 0, 0],
+ /* 4 */ [0, 5, 0, 0, 0, 3, 6, 0, 0],
+ /* 5 */ [6, 8, 0, 0, 0, 0, 0, 9, 0],
+ /* 6 */ [0, 9, 5, 0, 0, 6, 0, 7, 0],
+ /* 7 */ [0, 0, 0, 0, 4, 0, 0, 6, 0],
+ /* 8 */ [4, 0, 0, 0, 0, 7, 2, 0, 3]
];
#[cfg(test)]
static DEFAULT_SOLUTION: [[u8;9];9] = [
/* 0 1 2 3 4 5 6 7 8 */
- /* 0 */ [1u8, 4u8, 9u8, 6u8, 7u8, 5u8, 8u8, 3u8, 2u8],
- /* 1 */ [5u8, 3u8, 8u8, 1u8, 2u8, 9u8, 7u8, 4u8, 6u8],
- /* 2 */ [7u8, 2u8, 6u8, 8u8, 3u8, 4u8, 1u8, 5u8, 9u8],
- /* 3 */ [9u8, 1u8, 4u8, 5u8, 6u8, 8u8, 3u8, 2u8, 7u8],
- /* 4 */ [2u8, 5u8, 7u8, 4u8, 9u8, 3u8, 6u8, 1u8, 8u8],
- /* 5 */ [6u8, 8u8, 3u8, 7u8, 1u8, 2u8, 5u8, 9u8, 4u8],
- /* 6 */ [3u8, 9u8, 5u8, 2u8, 8u8, 6u8, 4u8, 7u8, 1u8],
- /* 7 */ [8u8, 7u8, 2u8, 3u8, 4u8, 1u8, 9u8, 6u8, 5u8],
- /* 8 */ [4u8, 6u8, 1u8, 9u8, 5u8, 7u8, 2u8, 8u8, 3u8]
+ /* 0 */ [1, 4, 9, 6, 7, 5, 8, 3, 2],
+ /* 1 */ [5, 3, 8, 1, 2, 9, 7, 4, 6],
+ /* 2 */ [7, 2, 6, 8, 3, 4, 1, 5, 9],
+ /* 3 */ [9, 1, 4, 5, 6, 8, 3, 2, 7],
+ /* 4 */ [2, 5, 7, 4, 9, 3, 6, 1, 8],
+ /* 5 */ [6, 8, 3, 7, 1, 2, 5, 9, 4],
+ /* 6 */ [3, 9, 5, 2, 8, 6, 4, 7, 1],
+ /* 7 */ [8, 7, 2, 3, 4, 1, 9, 6, 5],
+ /* 8 */ [4, 6, 1, 9, 5, 7, 2, 8, 3]
];
#[test]
fn colors_new_works() {
- assert_eq!(*Colors::new(1), 1022u16);
- assert_eq!(*Colors::new(2), 1020u16);
- assert_eq!(*Colors::new(3), 1016u16);
- assert_eq!(*Colors::new(4), 1008u16);
- assert_eq!(*Colors::new(5), 992u16);
- assert_eq!(*Colors::new(6), 960u16);
- assert_eq!(*Colors::new(7), 896u16);
- assert_eq!(*Colors::new(8), 768u16);
- assert_eq!(*Colors::new(9), 512u16);
+ assert_eq!(*Colors::new(1), 1022);
+ assert_eq!(*Colors::new(2), 1020);
+ assert_eq!(*Colors::new(3), 1016);
+ assert_eq!(*Colors::new(4), 1008);
+ assert_eq!(*Colors::new(5), 992);
+ assert_eq!(*Colors::new(6), 960);
+ assert_eq!(*Colors::new(7), 896);
+ assert_eq!(*Colors::new(8), 768);
+ assert_eq!(*Colors::new(9), 512);
}
#[test]
fn colors_next_works() {
- assert_eq!(Colors(0).next(), 0u8);
- assert_eq!(Colors(2).next(), 1u8);
- assert_eq!(Colors(4).next(), 2u8);
- assert_eq!(Colors(8).next(), 3u8);
- assert_eq!(Colors(16).next(), 4u8);
- assert_eq!(Colors(32).next(), 5u8);
- assert_eq!(Colors(64).next(), 6u8);
- assert_eq!(Colors(128).next(), 7u8);
- assert_eq!(Colors(256).next(), 8u8);
- assert_eq!(Colors(512).next(), 9u8);
- assert_eq!(Colors(1024).next(), 0u8);
+ assert_eq!(Colors(0).next(), 0);
+ assert_eq!(Colors(2).next(), 1);
+ assert_eq!(Colors(4).next(), 2);
+ assert_eq!(Colors(8).next(), 3);
+ assert_eq!(Colors(16).next(), 4);
+ assert_eq!(Colors(32).next(), 5);
+ assert_eq!(Colors(64).next(), 6);
+ assert_eq!(Colors(128).next(), 7);
+ assert_eq!(Colors(256).next(), 8);
+ assert_eq!(Colors(512).next(), 9);
+ assert_eq!(Colors(1024).next(), 0);
}
#[test]
colors.remove(1);
// THEN
- assert_eq!(colors.next(), 2u8);
+ assert_eq!(colors.next(), 2);
}
#[test]
fn main() {
match () {
- Trait { x: 42_usize } => () //~ ERROR use of trait `Trait` in a struct pattern
+ Trait { x: 42 } => () //~ ERROR use of trait `Trait` in a struct pattern
}
}
// except according to those terms.
fn main() {
- let _x: i32 = [1i32, 2, 3];
+ let _x: i32 = [1, 2, 3];
//~^ ERROR mismatched types
//~| expected `i32`
- //~| found `[i32; 3]`
+ //~| found `[_; 3]`
//~| expected i32
//~| found array of 3 elements
- let x: &[i32] = &[1i32, 2, 3];
+ let x: &[i32] = &[1, 2, 3];
let _y: &i32 = x;
//~^ ERROR mismatched types
//~| expected `&i32`
let x: isize;
let y: isize;
unsafe {
- asm!("mov $1, $0" : "=r"(x) : "=r"(5_usize)); //~ ERROR operand constraint contains '='
- asm!("mov $1, $0" : "=r"(y) : "+r"(5_usize)); //~ ERROR operand constraint contains '+'
+ asm!("mov $1, $0" : "=r"(x) : "=r"(5)); //~ ERROR operand constraint contains '='
+ asm!("mov $1, $0" : "=r"(y) : "+r"(5)); //~ ERROR operand constraint contains '+'
}
foo(x);
foo(y);
x = 1; //~ NOTE prior assignment occurs here
foo(x);
unsafe {
- asm!("mov $1, $0" : "=r"(x) : "r"(5_usize));
+ asm!("mov $1, $0" : "=r"(x) : "r"(5));
//~^ ERROR re-assignment of immutable variable `x`
}
foo(x);
pub fn main() {
let x: isize;
unsafe {
- asm!("mov $1, $0" : "r"(x) : "r"(5_usize)); //~ ERROR output operand constraint lacks '='
+ asm!("mov $1, $0" : "r"(x) : "r"(5)); //~ ERROR output operand constraint lacks '='
}
foo(x);
}
}
impl cat {
- pub fn speak(&self) { self.meows += 1_usize; }
+ pub fn speak(&self) { self.meows += 1; }
}
fn cat(in_x : usize, in_y : isize) -> cat {
}
fn main() {
- let nyan : cat = cat(52_usize, 99);
+ let nyan : cat = cat(52, 99);
nyan.speak = || println!("meow"); //~ ERROR attempted to take value of method
}
// Tests that a function with a ! annotation always actually fails
fn bad_bang(i: usize) -> ! {
- return 7_usize; //~ ERROR `return` in a function declared as diverging [E0166]
+ return 7; //~ ERROR `return` in a function declared as diverging [E0166]
}
fn main() { bad_bang(5); }
// Tests that a function with a ! annotation always actually fails
fn bad_bang(i: usize) -> ! { //~ ERROR computation may converge in a function marked as diverging
- if i < 0_usize { } else { panic!(); }
+ if i < 0 { } else { panic!(); }
}
fn main() { bad_bang(5); }
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-static i: String = 10i32;
+static i: String = 10;
//~^ ERROR mismatched types
//~| expected `collections::string::String`
-//~| found `i32`
+//~| found `_`
//~| expected struct `collections::string::String`
-//~| found i32
+//~| found integral variable
fn main() { println!("{}", i); }
// except according to those terms.
fn foo<T:'static>() {
- 1_usize.bar::<T>(); //~ ERROR `core::marker::Send` is not implemented
+ 1.bar::<T>(); //~ ERROR `core::marker::Send` is not implemented
}
trait bar {
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// error-pattern:`&&` cannot be applied to type `i32`
+// error-pattern:`&&` cannot be applied to type `_`
-fn main() { let x = 1i32 && 2i32; }
+fn main() { let x = 1 && 2; }
fn main() {
// By-ref captures
{
- let mut x = 0_usize;
+ let mut x = 0;
let _f = to_fn(|| x = 42); //~ ERROR cannot assign
- let mut y = 0_usize;
+ let mut y = 0;
let _g = to_fn(|| set(&mut y)); //~ ERROR cannot borrow
- let mut z = 0_usize;
+ let mut z = 0;
let _h = to_fn_mut(|| { set(&mut z); to_fn(|| z = 42); }); //~ ERROR cannot assign
}
// By-value captures
{
- let mut x = 0_usize;
+ let mut x = 0;
let _f = to_fn(move || x = 42); //~ ERROR cannot assign
- let mut y = 0_usize;
+ let mut y = 0;
let _g = to_fn(move || set(&mut y)); //~ ERROR cannot borrow
- let mut z = 0_usize;
+ let mut z = 0;
let _h = to_fn_mut(move || { set(&mut z); to_fn(move || z = 42); }); //~ ERROR cannot assign
}
}
}
fn blah() {
- let f = &Foo::Foo1(box 1u32, box 2u32);
+ let f = &Foo::Foo1(box 1, box 2);
match *f { //~ ERROR cannot move out of
Foo::Foo1(num1, //~ NOTE attempting to move value to here
num2) => (), //~ NOTE and here
#![allow(dead_code)]
fn main() {
// Original borrow ends at end of function
- let mut x = 1_usize;
+ let mut x = 1;
let y = &mut x;
let z = &x; //~ ERROR cannot borrow
}
match true {
true => {
// Original borrow ends at end of match arm
- let mut x = 1_usize;
+ let mut x = 1;
let y = &x;
let z = &mut x; //~ ERROR cannot borrow
}
fn bar() {
// Original borrow ends at end of closure
|| {
- let mut x = 1_usize;
+ let mut x = 1;
let y = &mut x;
let z = &mut x; //~ ERROR cannot borrow
};
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Test that an unknown attribute applied through `#[cfg_attr(all(), ...)]`
+// inside a macro expansion is still feature-gate checked (`all()` always
+// holds, so `unknown` gets attached to `fn foo`).
+
+macro_rules! foo {
+ () => {
+ #[cfg_attr(all(), unknown)] //~ ERROR `unknown` is currently unknown
+ fn foo() {}
+ }
+}
+
+foo!();
+
+fn main() {}
}
fn main() {
- let nyan = cat(0_usize);
+ let nyan = cat(0);
}
fn sleep(&self) { loop{} }
fn meow(&self) {
println!("Meow");
- meows += 1_usize; //~ ERROR unresolved name
+ meows += 1; //~ ERROR unresolved name
sleep(); //~ ERROR unresolved name
}
// Tests that we forbid coercion from `[T; n]` to `&[T]`
fn main() {
- let _: &[i32] = [0i32];
+ let _: &[i32] = [0];
//~^ ERROR mismatched types
//~| expected `&[i32]`
- //~| found `[i32; 1]`
+ //~| found `[_; 1]`
//~| expected &-ptr
//~| found array of 1 elements
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-const A: usize = { 1_usize; 2 };
+const A: usize = { 1; 2 };
//~^ ERROR: blocks in constants are limited to items and tail expressions
const B: usize = { { } 2 };
}
const C: usize = { foo!(); 2 };
-const D: usize = { let x = 4_usize; 2 };
+const D: usize = { let x = 4; 2 };
//~^ ERROR: blocks in constants are limited to items and tail expressions
pub fn main() {
struct A<T>
where T : Trait,
T : Add<T::Item>
- //~^ ERROR illegal recursive type
+ //~^ ERROR unsupported cyclic reference between types/traits detected
{
data: T
}
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Test a cycle where a type parameter on a trait has a default that
+// again references the trait.
+//
+// NOTE(review): `Box<Foo>` omits `Foo`'s parameter, so computing the
+// default presumably requires the default itself -- hence the cyclic
+// reference error below; confirm against the resolver.
+
+trait Foo<X = Box<Foo>> {
+ //~^ ERROR unsupported cyclic reference
+}
+
+fn main() { }
// a direct participant in the cycle.
trait A: B {
+ //~^ ERROR unsupported cyclic reference
}
-trait B: C { }
+trait B: C {
+ //~^ ERROR unsupported cyclic reference
+}
trait C: B { }
//~^ ERROR unsupported cyclic reference
impl T for S { }
#[derive(PartialEq)] //~ ERROR: `derive` may only be applied to structs and enums
-static s: usize = 0_usize;
+static s: usize = 0;
#[derive(PartialEq)] //~ ERROR: `derive` may only be applied to structs and enums
-const c: usize = 0_usize;
+const c: usize = 0;
#[derive(PartialEq)] //~ ERROR: `derive` may only be applied to structs and enums
mod m { }
fn main() {
let b = {
- let a = Box::new(RefCell::new(4i8));
- *a.borrow() + 1i8 //~ ERROR `*a` does not live long enough
+ let a = Box::new(RefCell::new(4));
+ *a.borrow() + 1 //~ ERROR `*a` does not live long enough
};
println!("{}", b);
}
+++ /dev/null
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-trait Foo {}
-
-fn foo<T: Foo + Foo>() {} //~ ERROR `Foo` already appears in the list of bounds
-
-fn main() {}
// except according to those terms.
// compile-flags: --extern std=
-// error-pattern: is not a file
+// error-pattern: can't find crate for `std`
fn main() {}
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Test that `#[allow_internal_unstable]` is itself feature-gated even
+// when the attribute lands on a macro definition that is generated by
+// another macro's expansion.
+
+macro_rules! bar {
+ () => {
+ // more layers don't help:
+ #[allow_internal_unstable]
+ macro_rules! baz { //~ ERROR allow_internal_unstable side-steps
+ () => {}
+ }
+ }
+}
+
+bar!();
+
+fn main() {}
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Using `#[allow_internal_unstable]` directly, without enabling the
+// corresponding feature gate, is an error: the attribute side-steps
+// stability checking for macro expansions.
+
+#[allow_internal_unstable] //~ ERROR allow_internal_unstable side-steps
+macro_rules! foo {
+ () => {}
+}
+
+fn main() {}
// test. Not ideal, but oh well :(
fn main() {
- let a = &[1i32, 2, 3];
+ let a = &[1, 2, 3];
println!("{}", {
extern "rust-intrinsic" { //~ ERROR intrinsics are subject to change
fn atomic_fence();
mod circ1 {
pub use circ2::f2;
pub fn f1() { println!("f1"); }
- pub fn common() -> usize { return 0_usize; }
+ pub fn common() -> usize { return 0; }
}
mod circ2 {
pub use circ1::f1;
pub fn f2() { println!("f2"); }
- pub fn common() -> usize { return 1_usize; }
+ pub fn common() -> usize { return 1; }
}
mod test {
// except according to those terms.
fn main() {
- (return)[0_usize]; //~ ERROR the type of this value must be known in this context
+ (return)[0]; //~ ERROR the type of this value must be known in this context
}
}
fn function<T:ToOpt + Clone>(counter: usize, t: T) {
- if counter > 0_usize {
- function(counter - 1_usize, t.to_option());
+ if counter > 0 {
+ function(counter - 1, t.to_option());
// FIXME(#4287) Error message should be here. It should be
// a type error to instantiate `test` at a type other than T.
}
}
fn main() {
- function(22_usize, 22_usize);
+ function(22, 22);
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// error-pattern: illegal recursive type
-
-
type x = Vec<x>;
+//~^ ERROR unsupported cyclic reference
fn main() { let b: x = Vec::new(); }
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// this has to be separate to internal-unstable.rs because these tests
+// have error messages pointing deep into the internals of the
+// cross-crate macros, and hence need to use error-pattern instead of
+// the // ~ form.
+
+// aux-build:internal_unstable.rs
+// error-pattern:use of unstable library feature 'function'
+// error-pattern:use of unstable library feature 'struct_field'
+// error-pattern:compilation successful
+#![feature(rustc_attrs)]
+
+#[macro_use]
+extern crate internal_unstable;
+
+// The `*_noallow` macros lack `#[allow_internal_unstable]`, so their
+// expansions hit the stability errors matched by the error-patterns
+// above (one per unstable item used).
+#[rustc_error]
+fn main() {
+ call_unstable_noallow!();
+
+ construct_unstable_noallow!(0);
+}
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// aux-build:internal_unstable.rs
+
+#![feature(rustc_attrs)]
+#![allow(dead_code)]
+
+extern crate internal_unstable;
+
+
+// User-written code inside a `thread_local!` initializer is still
+// stability-checked: `BAR`'s initializer calls the unstable function and
+// warns, while `FOO`'s trivial initializer is fine.
+thread_local!(static FOO: () = ());
+thread_local!(static BAR: () = internal_unstable::unstable()); //~ WARN use of unstable
+
+#[rustc_error]
+fn main() {} //~ ERROR
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// aux-build:internal_unstable.rs
+
+// `#[allow_internal_unstable]` only shields code generated *by* the
+// annotated macro itself: arguments the user passes in, code expanded by
+// a non-annotated macro (`foo!` below), and direct calls still warn
+// about instability.
+
+#![feature(rustc_attrs, allow_internal_unstable)]
+
+#[macro_use]
+extern crate internal_unstable;
+
+// Not annotated: its own expansion is checked like user code.
+macro_rules! foo {
+ ($e: expr, $f: expr) => {{
+ $e;
+ $f;
+ internal_unstable::unstable(); //~ WARN use of unstable
+ }}
+}
+
+// Annotated: code this macro generates itself may use unstable items,
+// but what it forwards to the non-annotated `foo!` is still checked.
+#[allow_internal_unstable]
+macro_rules! bar {
+ ($e: expr) => {{
+ foo!($e,
+ internal_unstable::unstable());
+ internal_unstable::unstable();
+ }}
+}
+
+#[rustc_error]
+fn main() { //~ ERROR
+ // ok, the instability is contained.
+ call_unstable_allow!();
+ construct_unstable_allow!(0);
+
+ // bad.
+ pass_through_allow!(internal_unstable::unstable()); //~ WARN use of unstable
+
+ pass_through_noallow!(internal_unstable::unstable()); //~ WARN use of unstable
+
+
+
+ println!("{:?}", internal_unstable::unstable()); //~ WARN use of unstable
+
+ bar!(internal_unstable::unstable()); //~ WARN use of unstable
+}
// except according to those terms.
fn blah() -> i32 { //~ ERROR not all control paths return a value
- 1i32
+ 1
; //~ HELP consider removing this semicolon:
}
{
let cont_iter = cont.iter();
//~^ ERROR cannot infer an appropriate lifetime for autoref due to conflicting requirements
- let result = cont_iter.fold(Some(0u16), |state, val| {
+ let result = cont_iter.fold(Some(0), |state, val| {
state.map_or(None, |mask| {
let bit = 1 << val;
if mask & bit == 0 {Some(mask|bit)} else {None}
}
fn main() {
- check((3_usize, 5_usize));
+ check((3, 5));
//~^ ERROR mismatched types
//~| expected `&_`
-//~| found `(usize, usize)`
+//~| found `(_, _)`
//~| expected &-ptr
//~| found tuple
}
// The expected arm type `Option<T>` has one type parameter, while
// the actual arm `Result<T, E>` has two. typeck should not be
// tricked into looking up a non-existing second type parameter.
- let _x: usize = match Some(1_usize) {
+ let _x: usize = match Some(1) {
Ok(u) => u,
//~^ ERROR mismatched types
- //~| expected `core::option::Option<usize>`
+ //~| expected `core::option::Option<_>`
//~| found `core::result::Result<_, _>`
//~| expected enum `core::option::Option`
//~| found enum `core::result::Result`
Err(e) => panic!(e)
//~^ ERROR mismatched types
- //~| expected `core::option::Option<usize>`
+ //~| expected `core::option::Option<_>`
//~| found `core::result::Result<_, _>`
//~| expected enum `core::option::Option`
//~| found enum `core::result::Result`
//~| expected u8
//~| found array of 1 elements
- let local = [0u8];
+ let local = [0];
let _v = &local as *mut u8;
//~^ ERROR mismatched types
//~| expected `*mut u8`
- //~| found `&[u8; 1]`
+ //~| found `&[_; 1]`
//~| expected u8,
//~| found array of 1 elements
}
struct List<'a, T: ListItem<'a>> {
//~^ ERROR the parameter type `T` may not live long enough
-//~^^ HELP consider adding an explicit lifetime bound
-//~^^^ NOTE ...so that the reference type `&'a [T]` does not outlive the data it points at
+//~^^ NOTE ...so that the reference type `&'a [T]` does not outlive the data it points at
slice: &'a [T]
}
-
+//~^ HELP consider adding an explicit lifetime bound
impl<'a, T: ListItem<'a>> Collection for List<'a, T> {
fn len(&self) -> usize {
0
}
fn main() {
- let x = 1_usize;
+ let x = 1;
let y: Foo;
// `x { ... }` should not be interpreted as a struct literal here
fn main() {
// FIXME (#22405): Replace `Box::new` with `box` here when/if possible.
- (|| Box::new(*[0_usize].as_slice()))();
- //~^ ERROR the trait `core::marker::Sized` is not implemented for the type `[usize]`
+ (|| Box::new(*[0].as_slice()))();
+ //~^ ERROR the trait `core::marker::Sized` is not implemented for the type `[_]`
}
const A3: usize = 1;
fn main() {
- match 1_usize {
+ match 1 {
A1 => {} //~ ERROR: static variables cannot be referenced in a pattern
A2 => {} //~ ERROR: static variables cannot be referenced in a pattern
A3 => {}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-pub static X: usize = 1_usize;
+pub static X: usize = 1;
fn main() {
- match 1_usize {
+ match 1 {
self::X => { },
//~^ ERROR static variables cannot be referenced in a pattern, use a `const` instead
_ => { },
AbstractRenderer
//~^ ERROR: the trait `core::marker::Sized` is not implemented
{
- match 0_usize {
+ match 0 {
_ => unimplemented!()
}
}
}
fn main() {
- let f = Foo::Variant(42_usize); //~ ERROR uses it like a function
+ let f = Foo::Variant(42); //~ ERROR uses it like a function
}
use std::any::Any;
use std::any::TypeId;
+use std::marker::MarkerTrait;
-pub trait Pt {}
-pub trait Rt {}
+pub trait Pt : MarkerTrait {}
+pub trait Rt : MarkerTrait {}
trait Private<P: Pt, R: Rt> {
fn call(&self, p: P, r: R);
}
-pub trait Public: Private<
+pub trait Public: Private< //~ ERROR private trait in exported type parameter bound
<Self as Public>::P,
-//~^ ERROR illegal recursive type; insert an enum or struct in the cycle, if this is desired
<Self as Public>::R
-//~^ ERROR unsupported cyclic reference between types/traits detected
> {
type P;
type R;
}
fn main() {
- let s = &mut 1_usize;
+ let s = &mut 1;
MyPtr(s).poke(s);
//~^ ERROR cannot borrow `*s` as mutable more than once at a time
// FIXME (#22405): Replace `Box::new` with `box` here when/if possible.
fn main() {
- let mut y = 1_usize;
+ let mut y = 1;
let c = RefCell::new(vec![]);
c.push(Box::new(|| y = 0));
c.push(Box::new(|| y = 0));
}
fn ufcs() {
- let mut y = 1_usize;
+ let mut y = 1;
let c = RefCell::new(vec![]);
Push::push(&c, Box::new(|| y = 0));
#[inline(never)]
fn foo(b: &Bar) {
- b.foo(&0usize)
+ b.foo(&0)
//~^ ERROR the trait `Foo` is not implemented for the type `Bar`
}
fn main() {
if let Some(homura) = Some("madoka") { //~ ERROR missing an else clause
//~| expected `()`
- //~| found `i32`
+ //~| found `_`
//~| expected ()
- //~| found i32
- 765i32
+ //~| found integral variable
+ 765
};
}
}
fn mut_ptr() -> *mut T {
- unsafe { 0u8 as *mut T }
+ unsafe { 0 as *mut T }
}
fn const_ptr() -> *const T {
- unsafe { 0u8 as *const T }
+ unsafe { 0 as *const T }
}
pub fn main() {
fn fail_len(v: Vec<isize> ) -> usize {
let mut i = 3;
panic!();
- for x in &v { i += 1_usize; }
+ for x in &v { i += 1; }
//~^ ERROR: unreachable statement
return i;
}
+++ /dev/null
-// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// ignore-tidy-linelength
-
-use std::cmp::PartialEq;
-
-trait Hahaha: PartialEq + PartialEq {
- //~^ ERROR trait `PartialEq` already appears in the list of bounds
-}
-
-struct Lol(isize);
-
-impl Hahaha for Lol { }
-
-impl PartialEq for Lol {
- fn eq(&self, other: &Lol) -> bool { **self != **other }
- fn ne(&self, other: &Lol) -> bool { **self == **other }
-}
-
-fn main() {
- if Lol(2) == Lol(4) {
- println!("2 == 4");
- } else {
- println!("2 != 4");
- }
-}
fn bar(int_param: usize) {}
fn main() {
- let foo: [u8; 4] = [1u8; 4_usize];
+ let foo: [u8; 4] = [1; 4];
bar(foo);
//~^ ERROR mismatched types
//~| expected `usize`
impl CtxtFn for usize {
fn f8(self, i: usize) -> usize {
- i * 4_usize
+ i * 4
}
fn f9(i: usize) -> usize {
- i * 4_usize
+ i * 4
}
}
impl OtherTrait for usize {
fn f9(i: usize) -> usize {
- i * 8_usize
+ i * 8
}
}
_ => ()
}
- match &Some(42i32) {
+ match &Some(42) {
Some(x) => (),
//~^ ERROR mismatched types
- //~| expected `&core::option::Option<i32>`
+ //~| expected `&core::option::Option<_>`
//~| found `core::option::Option<_>`
//~| expected &-ptr
//~| found enum `core::option::Option`
None => ()
//~^ ERROR mismatched types
- //~| expected `&core::option::Option<i32>`
+ //~| expected `&core::option::Option<_>`
//~| found `core::option::Option<_>`
//~| expected &-ptr
//~| found enum `core::option::Option`
fn bar<F:FnOnce() + Send>(_: F) { }
fn main() {
- let x = Rc::new(3_usize);
+ let x = Rc::new(3);
bar(move|| foo(x));
//~^ ERROR `core::marker::Send` is not implemented
}
fn check<'r, I: Iterator<Item=usize>, T: Itble<'r, usize, I>>(cont: &T) -> bool {
//~^ HELP: consider using an explicit lifetime parameter as shown: fn check<'r, I: Iterator<Item = usize>, T: Itble<'r, usize, I>>(cont: &'r T)
let cont_iter = cont.iter(); //~ ERROR: cannot infer
- let result = cont_iter.fold(Some(0u16), |state, val| {
+ let result = cont_iter.fold(Some(0), |state, val| {
state.map_or(None, |mask| {
let bit = 1 << val;
if mask & bit == 0 {Some(mask|bit)} else {None}
fn main() {
field_read(Foo { x: 1, b: false, marker: std::marker::NoCopy });
field_match_in_patterns(XYZ::Z);
- field_match_in_let(Bar { x: 42_usize, b: true, _guard: () });
+ field_match_in_let(Bar { x: 42, b: true, _guard: () });
let _ = Baz { x: 0 };
}
//
// regression test for #8005
-macro_rules! test { () => { fn foo() -> i32 { 1i32; } } }
+macro_rules! test { () => { fn foo() -> i32 { 1; } } }
//~^ ERROR not all control paths return a value
//~^^ HELP consider removing this semicolon
extern crate macro_non_reexport_2;
fn main() {
- assert_eq!(reexported!(), 3_usize); //~ ERROR macro undefined
+ assert_eq!(reexported!(), 3); //~ ERROR macro undefined
}
extern crate macro_reexport_1;
fn main() {
- assert_eq!(reexported!(), 3_usize); //~ ERROR macro undefined
+ assert_eq!(reexported!(), 3); //~ ERROR macro undefined
}
//~| found `Foo`
//~| expected &-ptr
//~| found struct `Foo`
- Foo::bar(&42i32); //~ ERROR mismatched types
+ Foo::bar(&42); //~ ERROR mismatched types
//~| expected `&Foo`
- //~| found `&i32`
+ //~| found `&_`
//~| expected struct `Foo`
- //~| found i32
+ //~| found integral variable
}
// except according to those terms.
fn main() {
- let foo = &mut 1i32;
+ let foo = &mut 1;
// (separate lines to ensure the spans are accurate)
let &_ //~ ERROR mismatched types
- //~| expected `&mut i32`
+ //~| expected `&mut _`
//~| found `&_`
//~| values differ in mutability
= foo;
let &mut _ = foo;
- let bar = &1i32;
+ let bar = &1;
let &_ = bar;
let &mut _ //~ ERROR mismatched types
- //~| expected `&i32`
+ //~| expected `&_`
//~| found `&mut _`
//~| values differ in mutability
= bar;
}
fn main() {
- let nyan : cat = cat(52_usize, 99);
+ let nyan : cat = cat(52, 99);
nyan.eat();
}
}
fn main() {
- let nyan : cat = cat(52_usize, 99);
+ let nyan : cat = cat(52, 99);
nyan.how_hungry = 0; //~ ERROR cannot assign
}
Foo { first: true, second: None } => (),
Foo { first: true, second: Some(_) } => (),
Foo { first: false, second: None } => (),
- Foo { first: false, second: Some([1_usize, 2_usize, 3_usize, 4_usize]) } => ()
+ Foo { first: false, second: Some([1, 2, 3, 4]) } => ()
}
}
enum blah { a(isize, isize, usize), b(isize, isize), }
-fn main() { match blah::a(1, 1, 2_usize) { blah::a(_, x, y) | blah::b(x, y) => { } } }
+fn main() { match blah::a(1, 1, 2) { blah::a(_, x, y) | blah::b(x, y) => { } } }
}
fn main() {
- let nyan : kitties::cat = kitties::cat(52_usize, 99);
+ let nyan : kitties::cat = kitties::cat(52, 99);
nyan.nap();
}
use cci_class::kitties::cat;
fn main() {
- let nyan : cat = cat(52_usize, 99);
- assert!((nyan.meows == 52_usize));
+ let nyan : cat = cat(52, 99);
+ assert!((nyan.meows == 52));
//~^ ERROR field `meows` of struct `cci_class::kitties::cat` is private
}
//~^ ERROR the trait `core::num::Int` is not implemented for the type `f32`
// Unsized type.
- let arr: &[_] = &[1u32, 2, 3];
+ let arr: &[_] = &[1, 2, 3];
let range = *arr..;
//~^ ERROR the trait `core::marker::Sized` is not implemented
}
impl dog {
pub fn chase_cat(&mut self) {
let p: &'static mut usize = &mut self.cats_chased; //~ ERROR cannot infer
- *p += 1_usize;
+ *p += 1;
}
pub fn chase_cat_2(&mut self) {
let p: &mut usize = &mut self.cats_chased;
- *p += 1_usize;
+ *p += 1;
}
}
fn dog() -> dog {
dog {
- cats_chased: 0_usize
+ cats_chased: 0
}
}
pub fn chase_cat(&mut self) {
let _f = || {
let p: &'static mut usize = &mut self.food; //~ ERROR cannot infer
- *p = 3_usize;
+ *p = 3;
};
}
}
}
fn build() {
- let x = ast::num(3_usize);
- let y = ast::num(4_usize);
+ let x = ast::num(3);
+ let y = ast::num(4);
let z = ast::add(&x, &y);
compute(&z);
}
// except according to those terms.
fn main() {
- let a0 = 0u8;
- let f = 1u8;
+ let a0 = 0;
+ let f = 1;
let mut a1 = &a0;
match (&a1,) {
(&ref b0,) => {
fn main() {
// Unboxed closure case
{
- let mut x = 0_usize;
+ let mut x = 0;
let mut f = || &mut x; //~ ERROR cannot infer
let x = f();
let y = f();
}
fn main() {
- let ctxt = ctxt { v: 22_usize };
+ let ctxt = ctxt { v: 22 };
let hc = has_ctxt { c: &ctxt };
- assert_eq!(get_v(box hc as Box<get_ctxt>), 22_usize);
+ assert_eq!(get_v(box hc as Box<get_ctxt>), 22);
}
let pt = PointF {
//~^ ERROR structure constructor specifies a structure of type
//~| expected f32
- //~| found i32
- x: 1i32,
- y: 2i32,
+ //~| found integral variable
+ x: 1,
+ y: 2,
};
let pt2 = Point::<f32> {
//~^ ERROR structure constructor specifies a structure of type
//~| expected f32
- //~| found i32
- x: 3i32,
- y: 4i32,
+ //~| found integral variable
+ x: 3,
+ y: 4,
};
let pair = PairF {
//~^ ERROR structure constructor specifies a structure of type
//~| expected f32
- //~| found i32
- x: 5i32,
- y: 6i32,
+ //~| found integral variable
+ x: 5,
+ y: 6,
};
let pair2 = PairF::<i32> {
//~^ ERROR structure constructor specifies a structure of type
//~| expected f32
- //~| found i32
- x: 7i32,
- y: 8i32,
+ //~| found integral variable
+ x: 7,
+ y: 8,
};
let pt3 = PointF::<i32> {
//~^ ERROR wrong number of type arguments
//~| ERROR structure constructor specifies a structure of type
- x: 9i32,
- y: 10i32,
+ x: 9,
+ y: 10,
};
}
fn f() -> isize { return g(); }
-fn g() -> usize { return 0_usize; }
+fn g() -> usize { return 0; }
fn main() { let y = f(); }
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Test case where an associated type is referenced from within the
+// supertrait definition, and the impl makes the wrong
+// associations. Issue #20220.
+
+use std::vec::IntoIter;
+
+pub trait Foo: Iterator<Item=<Self as Foo>::Key> {
+ type Key;
+}
+
+impl Foo for IntoIter<i32> { //~ ERROR type mismatch
+ type Key = u32;
+}
+
+fn main() {
+}
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// A variant of traits-issue-23003 in which an infinite series of
+// types are required. This currently creates an overflow. This test
+// is included to ensure that some controlled failure, at least,
+// results -- but it might be that we should adjust the rules somewhat
+// to make this legal. -nmatsakis
+
+use std::marker::PhantomData;
+
+trait Async {
+ type Cancel;
+}
+
+struct Receipt<A:Async> {
+ marker: PhantomData<A>,
+}
+
+struct Complete<B> {
+ core: Option<B>,
+}
+
+impl<B> Async for Complete<B> {
+ type Cancel = Receipt<Complete<Option<B>>>;
+}
+
+fn foo(r: Receipt<Complete<()>>) { }
+//~^ ERROR overflow
+
+fn main() { }
}
fn a() {
- test(22_i32, std::default::Default::default()); //~ ERROR type annotations required
+ test(22, std::default::Default::default()); //~ ERROR type annotations required
}
fn main() {}
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Test a case of a trait which extends the same supertrait twice, but
+// with difference type parameters. Test then that when we don't give
+// enough information to pick between these, no selection is made. In
+// this particular case, the two choices are i64/u64 -- so when we use
+// an integer literal, we wind up falling this literal back to i32.
+// See also `run-pass/trait-repeated-supertrait.rs`.
+
+trait CompareTo<T> {
+ fn same_as(&self, t: T) -> bool;
+}
+
+trait CompareToInts : CompareTo<i64> + CompareTo<u64> {
+}
+
+impl CompareTo<i64> for i64 {
+ fn same_as(&self, t: i64) -> bool { *self == t }
+}
+
+impl CompareTo<u64> for i64 {
+ fn same_as(&self, t: u64) -> bool { *self == (t as i64) }
+}
+
+impl CompareToInts for i64 { }
+
+fn with_obj(c: &CompareToInts) -> bool {
+ c.same_as(22) //~ ERROR `CompareTo<i32>` is not implemented
+}
+
+fn with_trait<C:CompareToInts>(c: &C) -> bool {
+ c.same_as(22) //~ ERROR `CompareTo<i32>` is not implemented
+}
+
+fn with_ufcs1<C:CompareToInts>(c: &C) -> bool {
+ CompareToInts::same_as(c, 22) //~ ERROR `CompareTo<i32>` is not implemented
+}
+
+fn with_ufcs2<C:CompareToInts>(c: &C) -> bool {
+ CompareTo::same_as(c, 22) //~ ERROR `CompareTo<i32>` is not implemented
+}
+
+fn main() {
+ assert_eq!(22_i64.same_as(22), true); //~ ERROR `CompareTo<i32>` is not implemented
+}
struct Point(i32, i32);
fn main() {
- let origin = Point(0i32, 0i32);
+ let origin = Point(0, 0);
origin.0;
origin.1;
origin.2;
//~^ ERROR attempted out-of-bounds tuple index `2` on type `Point`
- let tuple = (0i32, 0i32);
+ let tuple = (0, 0);
tuple.0;
tuple.1;
tuple.2;
- //~^ ERROR attempted out-of-bounds tuple index `2` on type `(i32, i32)`
+ //~^ ERROR attempted out-of-bounds tuple index `2` on type `(_, _)`
}
// Checking that the compiler reports multiple type errors at once
-fn main() { let a: bool = 1i32; let b: i32 = true; }
+fn main() { let a: bool = 1; let b: i32 = true; }
//~^ ERROR mismatched types
//~| expected `bool`
-//~| found `i32`
+//~| found `_`
//~| expected bool
-//~| found i32
+//~| found integral variable
//~| ERROR mismatched types
//~| expected `i32`
//~| found `bool`
impl<T: Int> BrokenAdd for T {}
pub fn main() {
- let foo: u8 = 0u8;
+ let foo: u8 = 0;
let x: u8 = foo.broken_add("hello darkness my old friend".to_string());
println!("{}", x);
}
static TEST3: _ = "test";
//~^ ERROR the type placeholder `_` is not allowed within types on item signatures
-static TEST4: _ = 145u16;
+static TEST4: _ = 145;
//~^ ERROR the type placeholder `_` is not allowed within types on item signatures
static TEST5: (_, _) = (1, 2);
static FN_TEST3: _ = "test";
//~^ ERROR the type placeholder `_` is not allowed within types on item signatures
- static FN_TEST4: _ = 145u16;
+ static FN_TEST4: _ = 145;
//~^ ERROR the type placeholder `_` is not allowed within types on item signatures
static FN_TEST5: (_, _) = (1, 2);
fn main() {
// By-ref cases
{
- let x = Box::new(0_usize);
+ let x = Box::new(0);
let f = to_fn(|| drop(x)); //~ ERROR cannot move
}
{
- let x = Box::new(0_usize);
+ let x = Box::new(0);
let f = to_fn_mut(|| drop(x)); //~ ERROR cannot move
}
{
- let x = Box::new(0_usize);
+ let x = Box::new(0);
let f = to_fn_once(|| drop(x)); // OK -- FnOnce
}
// By-value cases
{
- let x = Box::new(0_usize);
+ let x = Box::new(0);
let f = to_fn(move || drop(x)); //~ ERROR cannot move
}
{
- let x = Box::new(0_usize);
+ let x = Box::new(0);
let f = to_fn_mut(move || drop(x)); //~ ERROR cannot move
}
{
- let x = Box::new(0_usize);
+ let x = Box::new(0);
let f = to_fn_once(move || drop(x)); // this one is ok
}
}
fn set(x: &mut usize) { *x = 0; }
fn main() {
- let x = 0_usize;
+ let x = 0;
move || x = 1; //~ ERROR cannot assign
move || set(&mut x); //~ ERROR cannot borrow
move || x = 1; //~ ERROR cannot assign
// reference cannot escape the region of that variable.
fn main() {
let _f = {
- let x = 0_usize;
+ let x = 0;
|| x //~ ERROR `x` does not live long enough
};
}
// cause borrow conflicts.
fn main() {
- let mut x = 0_usize;
+ let mut x = 0;
let f = || x += 1;
let _y = x; //~ ERROR cannot use `x` because it was mutably borrowed
}
fn to_fn_mut<A,F:FnMut<A>>(f: F) -> F { f }
fn a() {
- let n = 0u8;
+ let n = 0;
let mut f = to_fn_mut(|| { //~ ERROR closure cannot assign
n += 1;
});
}
fn b() {
- let mut n = 0u8;
+ let mut n = 0;
let mut f = to_fn_mut(|| {
n += 1; // OK
});
}
fn c() {
- let n = 0u8;
+ let n = 0;
let mut f = to_fn_mut(move || {
// If we just did a straight-forward desugaring, this would
// compile, but we do something a bit more subtle, and hence
}
fn d() {
- let mut n = 0u8;
+ let mut n = 0;
let mut f = to_fn_mut(move || {
n += 1; // OK
});
}
fn e() {
- let n = 0u8;
+ let n = 0;
let mut f = to_fn(move || {
n += 1; //~ ERROR cannot assign
});
}
fn f() {
- let mut n = 0u8;
+ let mut n = 0;
let mut f = to_fn(move || {
n += 1; //~ ERROR cannot assign
});
}
fn main() {
- let mut counter = 0_u32;
+ let mut counter = 0;
call(|| {
counter += 1;
//~^ ERROR cannot assign to data in a captured outer variable in an `Fn` closure
enum foo { a(Box<foo>, isize), b(usize), }
-fn main() { match foo::b(1_usize) { foo::b(_) | foo::a(box _, 1) => { } foo::a(_, 1) => { } } }
+fn main() { match foo::b(1) { foo::b(_) | foo::a(box _, 1) => { } foo::a(_, 1) => { } } }
fn f(p: *const u8) {
- *p = 0u8; //~ ERROR dereference of unsafe pointer requires unsafe function or block
+ *p = 0; //~ ERROR dereference of unsafe pointer requires unsafe function or block
return;
}
+++ /dev/null
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// Test that bounds are sized-compatible.
-
-trait T : Sized {}
-fn f<Y: ?Sized + T>() {
-//~^ERROR incompatible bounds on `Y`, bound `T` does not allow unsized type
-}
-
-pub fn main() {
-}
fn main() {
{
- let a = AffineU32(1_u32);
+ let a = AffineU32(1);
let x = foo(&a);
drop(a); //~ ERROR cannot move out of `a`
drop(x);
}
{
- let a = AffineU32(1_u32);
+ let a = AffineU32(1);
let x = bar(&a);
drop(a); //~ ERROR cannot move out of `a`
drop(x);
}
{
- let a = AffineU32(1_u32);
+ let a = AffineU32(1);
let x = baz(&a);
drop(a); //~ ERROR cannot move out of `a`
drop(x);
}
fn call_it<B:TraitB>(b: B) -> isize {
- let y = 4_usize;
+ let y = 4;
b.gimme_an_a(y) //~ ERROR the trait `TraitA` is not implemented
}
}
fn main() {
- assoc_struct(Struct { b: -1i32, b1: 0i64 });
- assoc_local(1i32);
- assoc_arg::<i32>(2i64);
- assoc_return_value(3i32);
- assoc_tuple((4i32, 5i64));
- assoc_enum(Enum::Variant1(6i32, 7i64));
- assoc_enum(Enum::Variant2(8i64, 9i32));
+ assoc_struct(Struct { b: -1, b1: 0 });
+ assoc_local(1);
+ assoc_arg::<i32>(2);
+ assoc_return_value(3);
+ assoc_tuple((4, 5));
+ assoc_enum(Enum::Variant1(6, 7));
+ assoc_enum(Enum::Variant2(8, 9));
}
fn zzz() { () }
--- /dev/null
+// Copyright 2013-2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![omit_gdb_pretty_printer_section]
+
+// ignore-android: FIXME(#10381)
+// min-lldb-version: 310
+
+// aux-build:cross_crate_spans.rs
+extern crate cross_crate_spans;
+
+// compile-flags:-g
+
+
+// === GDB TESTS ===================================================================================
+
+// gdb-command:break cross_crate_spans.rs:21
+// gdb-command:run
+
+// gdb-command:print result
+// gdb-check:$1 = {17, 17}
+// gdb-command:print a_variable
+// gdb-check:$2 = 123456789
+// gdb-command:print another_variable
+// gdb-check:$3 = 123456789.5
+// gdb-command:continue
+
+// gdb-command:print result
+// gdb-check:$4 = {1212, 1212}
+// gdb-command:print a_variable
+// gdb-check:$5 = 123456789
+// gdb-command:print another_variable
+// gdb-check:$6 = 123456789.5
+// gdb-command:continue
+
+
+
+// === LLDB TESTS ==================================================================================
+
+// lldb-command:b cross_crate_spans.rs:21
+// lldb-command:run
+
+// lldb-command:print result
+// lldb-check:[...]$0 = (17, 17)
+// lldb-command:print a_variable
+// lldb-check:[...]$1 = 123456789
+// lldb-command:print another_variable
+// lldb-check:[...]$2 = 123456789.5
+// lldb-command:continue
+
+// lldb-command:print result
+// lldb-check:[...]$3 = (1212, 1212)
+// lldb-command:print a_variable
+// lldb-check:[...]$4 = 123456789
+// lldb-command:print another_variable
+// lldb-check:[...]$5 = 123456789.5
+// lldb-command:continue
+
+
+// This test makes sure that we can break in functions inlined from other crates.
+
+fn main() {
+
+ let _ = cross_crate_spans::generic_function(17u32);
+ let _ = cross_crate_spans::generic_function(1212i16);
+
+}
next: Val {
val: box UniqueNode {
next: Empty,
- value: 1_u16,
+ value: 1,
}
},
- value: 0_u16,
+ value: 0,
};
let unique_unique: Box<UniqueNode<u32>> = box UniqueNode {
fn main() {
- let vi8x16 = i8x16(0i8, 1i8, 2i8, 3i8, 4i8, 5i8, 6i8, 7i8,
- 8i8, 9i8, 10i8, 11i8, 12i8, 13i8, 14i8, 15i8);
-
- let vi16x8 = i16x8(16i16, 17i16, 18i16, 19i16, 20i16, 21i16, 22i16, 23i16);
- let vi32x4 = i32x4(24i32, 25i32, 26i32, 27i32);
- let vi64x2 = i64x2(28i64, 29i64);
-
- let vu8x16 = u8x16(30u8, 31u8, 32u8, 33u8, 34u8, 35u8, 36u8, 37u8,
- 38u8, 39u8, 40u8, 41u8, 42u8, 43u8, 44u8, 45u8);
- let vu16x8 = u16x8(46u16, 47u16, 48u16, 49u16, 50u16, 51u16, 52u16, 53u16);
- let vu32x4 = u32x4(54u32, 55u32, 56u32, 57u32);
- let vu64x2 = u64x2(58u64, 59u64);
+ let vi8x16 = i8x16(0, 1, 2, 3, 4, 5, 6, 7,
+ 8, 9, 10, 11, 12, 13, 14, 15);
+
+ let vi16x8 = i16x8(16, 17, 18, 19, 20, 21, 22, 23);
+ let vi32x4 = i32x4(24, 25, 26, 27);
+ let vi64x2 = i64x2(28, 29);
+
+ let vu8x16 = u8x16(30, 31, 32, 33, 34, 35, 36, 37,
+ 38, 39, 40, 41, 42, 43, 44, 45);
+ let vu16x8 = u16x8(46, 47, 48, 49, 50, 51, 52, 53);
+ let vu32x4 = u32x4(54, 55, 56, 57);
+ let vu64x2 = u64x2(58, 59);
let vf32x4 = f32x4(60.5f32, 61.5f32, 62.5f32, 63.5f32);
let vf64x2 = f64x2(64.5f64, 65.5f64);
// except according to those terms.
#[path = "circular_modules_hello.rs"]
-mod circular_modules_hello; //~ERROR: circular modules
+mod circular_modules_hello; //~ ERROR: circular modules
pub fn hi_str() -> String {
- "Hi!".to_string()
+ "Hi!".to_string()
}
fn main() {
}
fn main() {
- let nyan = cat(0us);
+ let nyan = cat(0);
}
// except according to those terms.
fn main() {
- let __isize = 0xff_ffff_ffff_ffff_ffff__isize;
+ let __isize = 0xff_ffff_ffff_ffff_ffff;
//~^ ERROR int literal is too large
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
+// ignore-windows
+// ignore-freebsd
#[path = "../compile-fail"]
mod foo; //~ ERROR: a directory
1e+; //~ ERROR: expected at least one digit in exponent
0x539.0; //~ ERROR: hexadecimal float literal is not supported
99999999999999999999999999999999; //~ ERROR: int literal is too large
- 99999999999999999999999999999999u32; //~ ERROR: int literal is too large
+ 99999999999999999999999999999999; //~ ERROR: int literal is too large
0x; //~ ERROR: no valid digits
0xu32; //~ ERROR: no valid digits
0ou32; //~ ERROR: no valid digits
}
fn make_gc() -> @get_ctxt {
- let ctxt = ctxt { v: 22us };
+ let ctxt = ctxt { v: 22 };
let hc = has_ctxt { c: &ctxt };
return @hc as @get_ctxt;
//~^ ERROR source contains reference
fn a() -> uint {
- 1usize
+ 1
}
const FOO: usize = ((5 as usize) - (4 as usize) as usize);
let _: [(); (FOO as usize)] = ([(() as ())] as [(); 1]);
- let _: [(); (1usize as usize)] = ([(() as ())] as [(); 1]);
+ let _: [(); (1 as usize)] = ([(() as ())] as [(); 1]);
let _ =
(((&((([(1 as i32), (2 as i32), (3 as i32)] as [i32; 3])) as [i32; 3])
as &[i32; 3]) as *const _ as *const [i32; 3]) as
- *const [i32; (3usize as usize)] as *const [i32; 3]);
+ *const [i32; (3 as usize)] as *const [i32; 3]);
const FOO: usize = 5 - 4;
let _: [(); FOO] = [()];
- let _ : [(); 1usize] = [()];
+ let _ : [(); 1] = [()];
- let _ = &([1,2,3]) as *const _ as *const [i32; 3usize];
+ let _ = &([1,2,3]) as *const _ as *const [i32; 3];
format!("test");
}
}
extern fn cb(data: libc::uintptr_t) -> libc::uintptr_t {
- if data == 1_usize {
+ if data == 1 {
data
} else {
- count(data - 1_usize) + count(data - 1_usize)
+ count(data - 1) + count(data - 1)
}
}
}
fn main() {
- for _ in 0..10_usize {
+ for _ in 0..10 {
task::spawn(move|| {
- let result = count(5_usize);
+ let result = count(5);
println!("result = %?", result);
panic!();
});
// error-pattern:Number is odd
fn even(x: uint) -> bool {
- if x < 2_usize {
+ if x < 2 {
return false;
- } else if x == 2_usize { return true; } else { return even(x - 2_usize); }
+ } else if x == 2 { return true; } else { return even(x - 2); }
}
fn foo(x: uint) {
}
}
-fn main() { foo(3_usize); }
+fn main() { foo(3); }
// except according to those terms.
// error-pattern:thread '<main>' panicked at 'arithmetic operation overflowed'
+// compile-flags: -C debug-assertions
// (Work around constant-evaluation)
fn value() -> u8 { 200 }
// except according to those terms.
// error-pattern:thread '<main>' panicked at 'arithmetic operation overflowed'
+// compile-flags: -C debug-assertions
// (Work around constant-evaluation)
fn value() -> u8 { 200 }
// except according to those terms.
// error-pattern:thread '<main>' panicked at 'arithmetic operation overflowed'
+// compile-flags: -C debug-assertions
// (Work around constant-evaluation)
fn value() -> u8 { 42 }
--- /dev/null
+-include ../tools.mk
+
+all:
+ $(RUSTC) debug.rs -C debug-assertions=no
+ $(call RUN,debug) good
+ $(RUSTC) debug.rs -C opt-level=0
+ $(call RUN,debug) bad
+ $(RUSTC) debug.rs -C opt-level=1
+ $(call RUN,debug) good
+ $(RUSTC) debug.rs -C opt-level=2
+ $(call RUN,debug) good
+ $(RUSTC) debug.rs -C opt-level=3
+ $(call RUN,debug) good
+ $(RUSTC) debug.rs -O
+ $(call RUN,debug) good
+ $(RUSTC) debug.rs
+ $(call RUN,debug) bad
+ $(RUSTC) debug.rs -C debug-assertions=yes -O
+ $(call RUN,debug) bad
+ $(RUSTC) debug.rs -C debug-assertions=yes -C opt-level=1
+ $(call RUN,debug) bad
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![deny(warnings)]
+
+use std::env;
+use std::thread;
+
+fn main() {
+ let should_fail = env::args().nth(1) == Some("bad".to_string());
+
+ assert_eq!(thread::spawn(debug_assert_eq).join().is_err(), should_fail);
+ assert_eq!(thread::spawn(debug_assert).join().is_err(), should_fail);
+ assert_eq!(thread::spawn(overflow).join().is_err(), should_fail);
+}
+
+fn debug_assert_eq() {
+ let mut hit1 = false;
+ let mut hit2 = false;
+ debug_assert_eq!({ hit1 = true; 1 }, { hit2 = true; 2 });
+ assert!(!hit1);
+ assert!(!hit2);
+}
+
+fn debug_assert() {
+ let mut hit = false;
+ debug_assert!({ hit = true; false });
+ assert!(!hit);
+}
+
+fn overflow() {
+ fn add(a: u8, b: u8) -> u8 { a + b }
+
+ add(200u8, 200u8);
+}
digraph block {
N0[label="entry"];
N1[label="exit"];
- N2[label="expr 2usize"];
- N3[label="expr 0usize"];
- N4[label="expr 20usize"];
- N5[label="expr [2usize, 0usize, 20usize]"];
+ N2[label="expr 2"];
+ N3[label="expr 0"];
+ N4[label="expr 20"];
+ N5[label="expr [2, 0, 20]"];
N6[label="local v"];
- N7[label="stmt let v = [2usize, 0usize, 20usize];"];
+ N7[label="stmt let v = [2, 0, 20];"];
N8[label="expr v"];
- N9[label="expr 20usize"];
- N10[label="expr v[20usize]"];
- N11[label="stmt v[20usize];"];
- N12[label="block { let v = [2usize, 0usize, 20usize]; v[20usize]; }"];
+ N9[label="expr 20"];
+ N10[label="expr v[20]"];
+ N11[label="stmt v[20];"];
+ N12[label="block { let v = [2, 0, 20]; v[20]; }"];
N0 -> N2;
N2 -> N3;
N3 -> N4;
// except according to those terms.
pub fn expr_index_20() {
- let v = [2_usize, 0_usize, 20_usize];
- v[20_usize];
+ let v = [2, 0, 20];
+ v[20];
}
use rustc_driver::driver::{compile_input, CompileController};
use syntax::diagnostics::registry::Registry;
+use std::path::PathBuf;
+
fn main() {
let src = r#"
fn main() {}
panic!("expected rustc path");
}
- let tmpdir = Path::new(&args[1]);
+ let tmpdir = PathBuf::new(&args[1]);
- let mut sysroot = Path::new(&args[3]);
+ let mut sysroot = PathBuf::new(&args[3]);
sysroot.pop();
sysroot.pop();
compile(src.to_string(), tmpdir.join("out"), sysroot.clone());
}
-fn basic_sess(sysroot: Path) -> Session {
+fn basic_sess(sysroot: PathBuf) -> Session {
let mut opts = basic_options();
opts.output_types = vec![OutputTypeExe];
opts.maybe_sysroot = Some(sysroot);
sess
}
-fn compile(code: String, output: Path, sysroot: Path) {
+fn compile(code: String, output: PathBuf, sysroot: PathBuf) {
let sess = basic_sess(sysroot);
let cfg = build_configuration(&sess);
let control = CompileController::basic();
// buglink test - see issue #1337.
fn test_alias<I: Iterator>(i: Option<<I as Iterator>::Item>) {
- let s = sub_struct{ field2: 45u32, };
+ let s = sub_struct{ field2: 45, };
// import tests
fn foo(x: &Float) {}
let _: Option<u8> = from_i32(45);
- let x = 42_usize;
+ let x = 42;
myflate::deflate_bytes(&[]);
- let x = (3, 4_usize);
+ let x = (3, 4);
let y = x.1;
}
pub fn dummy() {
// force the vtable to be created
- let _x = &1_usize as &Foo;
+ let _x = &1 as &Foo;
}
use rustc_driver::{driver, CompilerCalls, Compilation};
use syntax::diagnostics;
+use std::path::PathBuf;
struct TestCalls {
count: u32
_: &getopts::Matches,
_: &Session,
_: &Input,
- _: &Option<Path>,
- _: &Option<Path>)
+ _: &Option<PathBuf>,
+ _: &Option<PathBuf>)
-> Compilation {
self.count *= 3;
Compilation::Stop
}
- fn some_input(&mut self, input: Input, input_path: Option<Path>) -> (Input, Option<Path>) {
+ fn some_input(&mut self, input: Input, input_path: Option<PathBuf>)
+ -> (Input, Option<PathBuf>) {
self.count *= 5;
(input, input_path)
}
fn no_input(&mut self,
_: &getopts::Matches,
_: &config::Options,
- _: &Option<Path>,
- _: &Option<Path>,
+ _: &Option<PathBuf>,
+ _: &Option<PathBuf>,
_: &diagnostics::registry::Registry)
- -> Option<(Input, Option<Path>)> {
+ -> Option<(Input, Option<PathBuf>)> {
panic!("This shouldn't happen");
}
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// aux-build:procedural_mbe_matching.rs
+// ignore-stage1
+
+#![feature(plugin)]
+#![plugin(procedural_mbe_matching)]
+
+#[no_link]
+extern crate procedural_mbe_matching;
+
+pub fn main() {
+ let abc = 123u32;
+ assert_eq!(matches!(Some(123), None | Some(0)), false);
+ assert_eq!(matches!(Some(123), None | Some(123)), true);
+ assert_eq!(matches!(true, true), true);
+}
-// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
let arm = quote_arm!(cx, (ref x, ref y) => (x, y));
check_pp(ext_cx, arm, pprust::print_stmt, "(ref x, ref y) = (x, y)".to_string());
+
+ let attr = quote_attr!(cx, #![cfg(foo = "bar")]);
+ check_pp(ext_cx, attr, pprust::print_attribute, "#![cfg(foo = "bar")]".to_string());
}
fn check_pp<T>(cx: fake_ext_ctxt,
let _k: P<syntax::ast::Method> = quote_method!(cx, #[doc = "hello"] fn foo(&self) {});
let _l: P<syntax::ast::Ty> = quote_ty!(cx, &int);
+
+ let _m: Vec<syntax::ast::TokenTree> = quote_matcher!(cx, $($foo:tt,)* bar);
+ let _n: syntax::ast::Attribute = quote_attr!(cx, #![cfg(foo, bar = "baz")]);
+
+ let _o: Option<P<syntax::ast::Item>> = quote_item!(cx, fn foo<T: ?Sized>() {});
}
fn main() {
struct RawT {struct_: sty, cname: Option<String>, hash: uint}
fn mk_raw_ty(st: sty, cname: Option<String>) -> RawT {
- return RawT {struct_: st, cname: cname, hash: 0_usize};
+ return RawT {struct_: st, cname: cname, hash: 0};
}
pub fn main() { mk_raw_ty(sty::ty_nil, None::<String>); }
fn main() {
let x = get(22);
- assert_eq!(22_usize, x);
+ assert_eq!(22, x);
}
pub fn main() {
let a = 42;
- assert!(foo2(a) == 42_usize);
+ assert!(foo2(a) == 42);
let a = Bar;
assert!(foo2(a) == 43);
pub fn main() {
let node: Node<i32> = Node { key: 1, value: Some(22) };
- assert_eq!(foo(&node), Some(22_u32));
+ assert_eq!(foo(&node), Some(22));
let node: Node<u32> = Node { key: 1, value: Some(22) };
- assert_eq!(foo(&node), Some(22_i32));
+ assert_eq!(foo(&node), Some(22));
}
pub fn main() {
let node: Node<i32> = Node(1, Some(22));
- assert_eq!(foo(&node), Some(22_u32));
+ assert_eq!(foo(&node), Some(22));
let node: Node<u32> = Node(1, Some(22));
- assert_eq!(foo(&node), Some(22_i32));
+ assert_eq!(foo(&node), Some(22));
}
}
pub fn main() {
- let z: uint = bar(2, 4_usize);
+ let z: uint = bar(2, 4);
}
enum CLike { A, B, C }
pub fn main() {
- let a = &Plus(@Minus(@Val(3_usize), @Val(10_usize)), @Plus(@Val(22_usize), @Val(5_usize)));
+ let a = &Plus(@Minus(@Val(3), @Val(10)), @Plus(@Val(22), @Val(5)));
test_rbml(a);
- let a = &Spanned {lo: 0_usize, hi: 5_usize, node: 22_usize};
+ let a = &Spanned {lo: 0, hi: 5, node: 22};
test_rbml(a);
- let a = &Point {x: 3_usize, y: 5_usize};
+ let a = &Point {x: 3, y: 5};
test_rbml(a);
- let a = &Top(22_usize);
+ let a = &Top(22);
test_rbml(a);
- let a = &Bottom(222_usize);
+ let a = &Bottom(222);
test_rbml(a);
let a = &A;
}
impl double for uint {
- fn double(self: Box<uint>) -> uint { *self * 2_usize }
+ fn double(self: Box<uint>) -> uint { *self * 2 }
}
pub fn main() {
- let x: Box<_> = box() (box 3_usize as Box<double>);
- assert_eq!(x.double(), 6_usize);
+ let x: Box<_> = box() (box 3 as Box<double>);
+ assert_eq!(x.double(), 6);
}
}
impl double for Box<uint> {
- fn double(self) -> uint { *self * 2_usize }
+ fn double(self) -> uint { *self * 2 }
}
pub fn main() {
- let x: Box<_> = box 3_usize;
- assert_eq!(x.double(), 6_usize);
+ let x: Box<_> = box 3;
+ assert_eq!(x.double(), 6);
}
}
impl double for Box<uint> {
- fn double(self: Box<Box<uint>>) -> uint { **self * 2_usize }
+ fn double(self: Box<Box<uint>>) -> uint { **self * 2 }
}
pub fn main() {
- let x: Box<Box<Box<Box<Box<_>>>>> = box box box box box 3_usize;
- assert_eq!(x.double(), 6_usize);
+ let x: Box<Box<Box<Box<Box<_>>>>> = box box box box box 3;
+ assert_eq!(x.double(), 6);
}
}
impl double for uint {
- fn double(self: Box<uint>) -> uint { *self * 2_usize }
+ fn double(self: Box<uint>) -> uint { *self * 2 }
}
pub fn main() {
- let x: Box<Box<_>> = box box 3_usize;
- assert_eq!(x.double(), 6_usize);
+ let x: Box<Box<_>> = box box 3;
+ assert_eq!(x.double(), 6);
}
}
impl double for uint {
- fn double(self: Box<uint>) -> uint { *self * 2_usize }
+ fn double(self: Box<uint>) -> uint { *self * 2 }
}
pub fn main() {
- let x: Box<_> = box 3_usize;
- assert_eq!(x.double(), 6_usize);
+ let x: Box<_> = box 3;
+ assert_eq!(x.double(), 6);
}
}
pub fn main() {
- let x: Box<_> = box 3_usize;
+ let x: Box<_> = box 3;
assert_eq!(x.foo(), "box 3".to_string());
}
// except according to those terms.
pub fn main() {
- assert_eq!(0xffffffffu32, (-1 as u32));
- assert_eq!(4294967295u32, (-1 as u32));
- assert_eq!(0xffffffffffffffffu64, (-1 as u64));
- assert_eq!(18446744073709551615u64, (-1 as u64));
+ assert_eq!(0xffffffff, (-1 as u32));
+ assert_eq!(4294967295, (-1 as u32));
+ assert_eq!(0xffffffffffffffff, (-1 as u64));
+ assert_eq!(18446744073709551615, (-1 as u64));
- assert_eq!(-2147483648i32 - 1i32, 2147483647i32);
+ assert_eq!(-2147483648 - 1, 2147483647);
}
}
pub fn main() {
- let x = asBlock(|| 22_usize);
- assert_eq!(x, 22_usize);
+ let x = asBlock(|| 22);
+ assert_eq!(x, 22);
}
fn iter_vec<T, F>(v: Vec<T> , mut f: F) where F: FnMut(&T) { for x in &v { f(x); } }
pub fn main() {
- let v = vec![1i32, 2, 3, 4, 5, 6, 7];
- let mut odds = 0i32;
+ let v = vec![1, 2, 3, 4, 5, 6, 7];
+ let mut odds = 0;
iter_vec(v, |i| {
if *i % 2 == 1 {
odds += 1;
fn iter_vec<T, F>(v: Vec<T>, mut f: F) where F: FnMut(&T) { for x in &v { f(x); } }
pub fn main() {
- let v = vec![1i32, 2, 3, 4, 5];
+ let v = vec![1, 2, 3, 4, 5];
let mut sum = 0;
iter_vec(v.clone(), |i| {
iter_vec(v.clone(), |j| {
// the closures are in scope. Issue #6801.
fn a() -> i32 {
- let mut x = 3i32;
+ let mut x = 3;
x += 1;
let c1 = || x * 4;
let c2 = || x * 5;
}
fn b() -> i32 {
- let mut x = 3i32;
+ let mut x = 3;
x += 1;
let c1 = || get(&x);
let c2 = || get(&x);
}
fn c() -> i32 {
- let mut x = 3i32;
+ let mut x = 3;
x += 1;
let c1 = || x * 5;
let c2 = || get(&x);
fn iter_ints<F>(x: &Ints, mut f: F) -> bool where F: FnMut(&int) -> bool {
let l = x.values.len();
- (0_usize..l).all(|i| f(&x.values[i]))
+ (0..l).all(|i| f(&x.values[i]))
}
pub fn main() {
}
fn main() {
- let mut sum = 0_usize;
- let elems = [ 1_usize, 2, 3, 4, 5 ];
+ let mut sum = 0;
+ let elems = [ 1, 2, 3, 4, 5 ];
each(&elems, |val: &uint| sum += *val);
assert_eq!(sum, 15);
}
assert_eq!(u, 'Q' as u32);
assert_eq!(i as u8, 'Q' as u8);
assert_eq!(i as u8 as i8, 'Q' as u8 as i8);
- assert_eq!(0x51u8 as char, 'Q');
+ assert_eq!(0x51 as char, 'Q');
assert_eq!(0 as u32, false as u32);
}
use cci_borrow_lib::foo;
pub fn main() {
- let p: Box<_> = box 22_usize;
+ let p: Box<_> = box 22;
let r = foo(&*p);
println!("r={}", r);
- assert_eq!(r, 22_usize);
+ assert_eq!(r, 22);
}
//let bt0 = sys::frame_address();
//println!("%?", bt0);
- 3_usize.to(10_usize, |i| {
+ 3.to(10, |i| {
println!("{}", i);
//let bt1 = sys::frame_address();
extern crate cci_iter_lib;
pub fn main() {
- //let bt0 = sys::rusti::frame_address(1u32);
+ //let bt0 = sys::rusti::frame_address(1);
//println!("%?", bt0);
cci_iter_lib::iter(&[1, 2, 3], |i| {
println!("{}", *i);
- //assert!(bt0 == sys::rusti::frame_address(2u32));
+ //assert!(bt0 == sys::rusti::frame_address(2));
})
}
// actually working.
//let bt0 = sys::frame_address();
//println!("%?", bt0);
- iter(vec!(1_usize, 2_usize, 3_usize), |i| {
+ iter(vec!(1, 2, 3), |i| {
println!("{}", i);
//let bt1 = sys::frame_address();
}
pub fn main() {
- let nyan: Box<ToString> = box cat(0_usize, 2, "nyan".to_string()) as Box<ToString>;
+ let nyan: Box<ToString> = box cat(0, 2, "nyan".to_string()) as Box<ToString>;
print_out(nyan, "nyan".to_string());
}
impl cat {
fn meow(&mut self) {
println!("Meow");
- self.meows += 1_usize;
- if self.meows % 5_usize == 0_usize {
+ self.meows += 1;
+ if self.meows % 5 == 0 {
self.how_hungry += 1;
}
}
pub fn main() {
- let mut nyan = cat(0_usize, 2, "nyan".to_string());
+ let mut nyan = cat(0, 2, "nyan".to_string());
let mut nyan: &mut noisy = &mut nyan;
nyan.speak();
}
fn cat(done: extern fn(uint)) -> cat {
cat {
- meows: 0_usize,
+ meows: 0,
done: done
}
}
pub fn cat(in_name: String) -> cat {
cat {
name: in_name,
- meows: 0_usize
+ meows: 0
}
}
}
use cci_class_2::kitties::cat;
pub fn main() {
- let nyan : cat = cat(52_usize, 99);
- let kitty = cat(1000_usize, 2);
+ let nyan : cat = cat(52, 99);
+ let kitty = cat(1000, 2);
assert_eq!(nyan.how_hungry, 99);
assert_eq!(kitty.how_hungry, 2);
nyan.speak();
use cci_class_3::kitties::cat;
pub fn main() {
- let mut nyan : cat = cat(52_usize, 99);
- let kitty = cat(1000_usize, 2);
+ let mut nyan : cat = cat(52, 99);
+ let kitty = cat(1000, 2);
assert_eq!(nyan.how_hungry, 99);
assert_eq!(kitty.how_hungry, 2);
nyan.speak();
- assert_eq!(nyan.meow_count(), 53_usize);
+ assert_eq!(nyan.meow_count(), 53);
}
}
impl cat {
- pub fn speak(&mut self) { self.meows += 1_usize; }
+ pub fn speak(&mut self) { self.meows += 1; }
pub fn meow_count(&mut self) -> uint { self.meows }
}
}
pub fn main() {
- let mut nyan: cat = cat(52_usize, 99);
- let kitty = cat(1000_usize, 2);
+ let mut nyan: cat = cat(52, 99);
+ let kitty = cat(1000, 2);
assert_eq!(nyan.how_hungry, 99);
assert_eq!(kitty.how_hungry, 2);
nyan.speak();
- assert_eq!(nyan.meow_count(), 53_usize);
+ assert_eq!(nyan.meow_count(), 53);
}
}
pub fn main() {
- let mut nyan : cat<int> = cat::<int>(52_usize, 99, vec!(9));
- let mut kitty = cat(1000_usize, 2, vec!("tabby".to_string()));
+ let mut nyan : cat<int> = cat::<int>(52, 99, vec!(9));
+ let mut kitty = cat(1000, 2, vec!("tabby".to_string()));
assert_eq!(nyan.how_hungry, 99);
assert_eq!(kitty.how_hungry, 2);
nyan.speak(vec!(1,2,3));
- assert_eq!(nyan.meow_count(), 55_usize);
+ assert_eq!(nyan.meow_count(), 55);
kitty.speak(vec!("meow".to_string(), "mew".to_string(), "purr".to_string(), "chirp".to_string()));
- assert_eq!(kitty.meow_count(), 1004_usize);
+ assert_eq!(kitty.meow_count(), 1004);
}
impl cat {
fn meow(&mut self) {
println!("Meow");
- self.meows += 1_usize;
- if self.meows % 5_usize == 0_usize {
+ self.meows += 1;
+ if self.meows % 5 == 0 {
self.how_hungry += 1;
}
}
}
pub fn main() {
- let nyan: Box<ToString> = box cat(0_usize, 2, "nyan".to_string()) as Box<ToString>;
+ let nyan: Box<ToString> = box cat(0, 2, "nyan".to_string()) as Box<ToString>;
print_out(nyan, "nyan".to_string());
}
}
impl<U> cat<U> {
- pub fn speak(&mut self) { self.meows += 1_usize; }
+ pub fn speak(&mut self) { self.meows += 1; }
pub fn meow_count(&mut self) -> uint { self.meows }
}
pub fn main() {
- let _nyan : cat<int> = cat::<int>(52_usize, 99);
- // let mut kitty = cat(1000_usize, 2);
+ let _nyan : cat<int> = cat::<int>(52, 99);
+ // let mut kitty = cat(1000, 2);
}
use cci_class::kitties::cat;
pub fn main() {
- let nyan : cat = cat(52_usize, 99);
- let kitty = cat(1000_usize, 2);
+ let nyan : cat = cat(52, 99);
+ let kitty = cat(1000, 2);
assert_eq!(nyan.how_hungry, 99);
assert_eq!(kitty.how_hungry, 2);
}
}
pub fn main() {
- let mut nyan : cat = cat(52_usize, 99);
- let kitty = cat(1000_usize, 2);
+ let mut nyan : cat = cat(52, 99);
+ let kitty = cat(1000, 2);
assert_eq!(nyan.how_hungry, 99);
assert_eq!(kitty.how_hungry, 2);
nyan.speak();
}
pub fn main() {
- let nyan : cat = cat(52_usize, 99);
- let kitty = cat(1000_usize, 2);
+ let nyan : cat = cat(52, 99);
+ let kitty = cat(1000, 2);
assert_eq!(nyan.how_hungry, 99);
assert_eq!(kitty.how_hungry, 2);
}
}
pub fn main() {
- let mut the_vec = vec!(1_usize, 2, 3, 100);
+ let mut the_vec = vec!(1, 2, 3, 100);
assert_eq!(the_vec.clone(), bar(&mut the_vec));
assert_eq!(the_vec.clone(), bip(&the_vec));
}
assert_eq!(concat!("qux", "quux",).to_string(), "quxquux".to_string());
assert_eq!(
- concat!(1, 2, 3_usize, 4f32, 4.0, 'a', true),
+ concat!(1, 2, 3, 4f32, 4.0, 'a', true),
"12344.0atrue"
);
assert!(match "12344.0atrue" {
- concat!(1, 2, 3_usize, 4f32, 4.0, 'a', true) => true,
+ concat!(1, 2, 3, 4f32, 4.0, 'a', true) => true,
_ => false
})
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: --cfg ndebug
+// compile-flags: -C debug-assertions=no
// exec-env:RUST_LOG=conditional-debug-macro-off=4
#[macro_use]
assert_eq!(BLOCK_FN(300), 300);
assert_eq!(BLOCK_ENUM_CONSTRUCTOR(200), Some(200));
// FIXME #13972
- // assert_eq!(BLOCK_UNSAFE_SAFE_PTR as *const isize as usize, 0xdeadbeef_us);
- // assert_eq!(BLOCK_UNSAFE_SAFE_PTR_2 as *const isize as usize, 0xdeadbeef_us);
+ // assert_eq!(BLOCK_UNSAFE_SAFE_PTR as *const isize as usize, 0xdeadbeef);
+ // assert_eq!(BLOCK_UNSAFE_SAFE_PTR_2 as *const isize as usize, 0xdeadbeef);
}
foo("hi".to_string());
foo(~[1, 2, 3]);
foo(F{field: 42});
- foo((1, 2_usize));
+ foo((1, 2));
foo(@1);*/
foo(Box::new(1));
}
// except according to those terms.
fn check_expr() {
- let _: & uint = &1_usize;
- let _: & & uint = &&1_usize;
- let _: & & & uint = &&&1_usize;
- let _: & & & uint = & &&1_usize;
- let _: & & & & uint = &&&&1_usize;
- let _: & & & & uint = & &&&1_usize;
- let _: & & & & & uint = &&&&&1_usize;
+ let _: & uint = &1;
+ let _: & & uint = &&1;
+ let _: & & & uint = &&&1;
+ let _: & & & uint = & &&1;
+ let _: & & & & uint = &&&&1;
+ let _: & & & & uint = & &&&1;
+ let _: & & & & & uint = &&&&&1;
}
fn check_ty() {
- let _: &uint = & 1_usize;
- let _: &&uint = & & 1_usize;
- let _: &&&uint = & & & 1_usize;
- let _: & &&uint = & & & 1_usize;
- let _: &&&&uint = & & & & 1_usize;
- let _: & &&&uint = & & & & 1_usize;
- let _: &&&&&uint = & & & & & 1_usize;
+ let _: &uint = & 1;
+ let _: &&uint = & & 1;
+ let _: &&&uint = & & & 1;
+ let _: & &&uint = & & & 1;
+ let _: &&&&uint = & & & & 1;
+ let _: & &&&uint = & & & & 1;
+ let _: &&&&&uint = & & & & & 1;
}
fn check_pat() {
let (sender, receiver) = channel();
{
- let v = Foo::NestedVariant(box 42_usize, SendOnDrop { sender: sender.clone() }, sender);
+ let v = Foo::NestedVariant(box 42, SendOnDrop { sender: sender.clone() }, sender);
}
assert_eq!(receiver.recv().unwrap(), Message::DestructorRan);
assert_eq!(receiver.recv().unwrap(), Message::Dropped);
let (sender, receiver) = channel();
let t = {
thread::spawn(move|| {
- let mut v = Foo::NestedVariant(box 42usize, SendOnDrop {
+ let mut v = Foo::NestedVariant(box 42, SendOnDrop {
sender: sender.clone()
}, sender.clone());
- v = Foo::NestedVariant(box 42_usize,
+ v = Foo::NestedVariant(box 42,
SendOnDrop { sender: sender.clone() },
sender.clone());
v = Foo::SimpleVariant(sender.clone());
pub fn main() {
unsafe {
- assert_eq!(22_u8, rust_dbg_extern_identity_u8(22_u8));
+ assert_eq!(22, rust_dbg_extern_identity_u8(22));
}
}
pub fn main() {
unsafe {
- assert_eq!(22_u32, rust_dbg_extern_identity_u32(22_u32));
+ assert_eq!(22, rust_dbg_extern_identity_u32(22));
}
}
pub fn main() {
unsafe {
- assert_eq!(22_u64, rust_dbg_extern_identity_u64(22_u64));
+ assert_eq!(22, rust_dbg_extern_identity_u64(22));
}
}
pub fn main() {
let len = strlen("Rust".to_string());
- assert_eq!(len, 4_usize);
+ assert_eq!(len, 4);
}
-pub fn main() { let mut x: i32 = -400_i32; x = 0_i32 - x; assert!((x == 400_i32)); }
+pub fn main() { let mut x: i32 = -400; x = 0 - x; assert!((x == 400)); }
pub fn main() {
- let mut x: i8 = -12i8;
- let y: i8 = -12i8;
- x = x + 1i8;
- x = x - 1i8;
+ let mut x: i8 = -12;
+ let y: i8 = -12;
+ x = x + 1;
+ x = x - 1;
assert_eq!(x, y);
}
// except according to those terms.
fn even(x: uint) -> bool {
- if x < 2_usize {
+ if x < 2 {
return false;
- } else if x == 2_usize { return true; } else { return even(x - 2_usize); }
+ } else if x == 2 { return true; } else { return even(x - 2); }
}
fn foo(x: uint) {
}
}
-pub fn main() { foo(2_usize); }
+pub fn main() { foo(2); }
#[cfg(target_arch = "x86")]
pub fn main() {
unsafe {
- assert_eq!(::rusti::pref_align_of::<u64>(), 8_usize);
- assert_eq!(::rusti::min_align_of::<u64>(), 4_usize);
+ assert_eq!(::rusti::pref_align_of::<u64>(), 8);
+ assert_eq!(::rusti::min_align_of::<u64>(), 4);
}
}
#[cfg(any(target_arch = "x86_64", target_arch = "arm", target_arch = "aarch64"))]
pub fn main() {
unsafe {
- assert_eq!(::rusti::pref_align_of::<u64>(), 8_usize);
- assert_eq!(::rusti::min_align_of::<u64>(), 8_usize);
+ assert_eq!(::rusti::pref_align_of::<u64>(), 8);
+ assert_eq!(::rusti::min_align_of::<u64>(), 8);
}
}
}
#[cfg(target_arch = "x86_64")]
pub fn main() {
unsafe {
- assert_eq!(::rusti::pref_align_of::<u64>(), 8u);
- assert_eq!(::rusti::min_align_of::<u64>(), 8u);
+ assert_eq!(::rusti::pref_align_of::<u64>(), 8);
+ assert_eq!(::rusti::min_align_of::<u64>(), 8);
}
}
}
#[cfg(target_arch = "x86")]
pub fn main() {
unsafe {
- assert_eq!(::rusti::pref_align_of::<u64>(), 8_usize);
- assert_eq!(::rusti::min_align_of::<u64>(), 8_usize);
+ assert_eq!(::rusti::pref_align_of::<u64>(), 8);
+ assert_eq!(::rusti::min_align_of::<u64>(), 8);
}
}
#[cfg(target_arch = "x86_64")]
pub fn main() {
unsafe {
- assert_eq!(::rusti::pref_align_of::<u64>(), 8_usize);
- assert_eq!(::rusti::min_align_of::<u64>(), 8_usize);
+ assert_eq!(::rusti::pref_align_of::<u64>(), 8);
+ assert_eq!(::rusti::min_align_of::<u64>(), 8);
}
}
}
#[cfg(any(target_arch = "arm", target_arch = "aarch64"))]
pub fn main() {
unsafe {
- assert_eq!(::rusti::pref_align_of::<u64>(), 8_usize);
- assert_eq!(::rusti::min_align_of::<u64>(), 8_usize);
+ assert_eq!(::rusti::pref_align_of::<u64>(), 8);
+ assert_eq!(::rusti::min_align_of::<u64>(), 8);
}
}
}
unsafe {
use rusti::*;
- assert_eq!(ctpop8(0u8), 0u8);
- assert_eq!(ctpop16(0u16), 0u16);
- assert_eq!(ctpop32(0u32), 0u32);
- assert_eq!(ctpop64(0u64), 0u64);
-
- assert_eq!(ctpop8(1u8), 1u8);
- assert_eq!(ctpop16(1u16), 1u16);
- assert_eq!(ctpop32(1u32), 1u32);
- assert_eq!(ctpop64(1u64), 1u64);
-
- assert_eq!(ctpop8(10u8), 2u8);
- assert_eq!(ctpop16(10u16), 2u16);
- assert_eq!(ctpop32(10u32), 2u32);
- assert_eq!(ctpop64(10u64), 2u64);
-
- assert_eq!(ctpop8(100u8), 3u8);
- assert_eq!(ctpop16(100u16), 3u16);
- assert_eq!(ctpop32(100u32), 3u32);
- assert_eq!(ctpop64(100u64), 3u64);
-
- assert_eq!(ctpop8(-1u8), 8u8);
- assert_eq!(ctpop16(-1u16), 16u16);
- assert_eq!(ctpop32(-1u32), 32u32);
- assert_eq!(ctpop64(-1u64), 64u64);
-
- assert_eq!(ctlz8(0u8), 8u8);
- assert_eq!(ctlz16(0u16), 16u16);
- assert_eq!(ctlz32(0u32), 32u32);
- assert_eq!(ctlz64(0u64), 64u64);
-
- assert_eq!(ctlz8(1u8), 7u8);
- assert_eq!(ctlz16(1u16), 15u16);
- assert_eq!(ctlz32(1u32), 31u32);
- assert_eq!(ctlz64(1u64), 63u64);
-
- assert_eq!(ctlz8(10u8), 4u8);
- assert_eq!(ctlz16(10u16), 12u16);
- assert_eq!(ctlz32(10u32), 28u32);
- assert_eq!(ctlz64(10u64), 60u64);
-
- assert_eq!(ctlz8(100u8), 1u8);
- assert_eq!(ctlz16(100u16), 9u16);
- assert_eq!(ctlz32(100u32), 25u32);
- assert_eq!(ctlz64(100u64), 57u64);
-
- assert_eq!(cttz8(-1u8), 0u8);
- assert_eq!(cttz16(-1u16), 0u16);
- assert_eq!(cttz32(-1u32), 0u32);
- assert_eq!(cttz64(-1u64), 0u64);
-
- assert_eq!(cttz8(0u8), 8u8);
- assert_eq!(cttz16(0u16), 16u16);
- assert_eq!(cttz32(0u32), 32u32);
- assert_eq!(cttz64(0u64), 64u64);
-
- assert_eq!(cttz8(1u8), 0u8);
- assert_eq!(cttz16(1u16), 0u16);
- assert_eq!(cttz32(1u32), 0u32);
- assert_eq!(cttz64(1u64), 0u64);
-
- assert_eq!(cttz8(10u8), 1u8);
- assert_eq!(cttz16(10u16), 1u16);
- assert_eq!(cttz32(10u32), 1u32);
- assert_eq!(cttz64(10u64), 1u64);
-
- assert_eq!(cttz8(100u8), 2u8);
- assert_eq!(cttz16(100u16), 2u16);
- assert_eq!(cttz32(100u32), 2u32);
- assert_eq!(cttz64(100u64), 2u64);
-
- assert_eq!(cttz8(-1u8), 0u8);
- assert_eq!(cttz16(-1u16), 0u16);
- assert_eq!(cttz32(-1u32), 0u32);
- assert_eq!(cttz64(-1u64), 0u64);
-
- assert_eq!(bswap16(0x0A0Bu16), 0x0B0Au16);
- assert_eq!(bswap32(0x0ABBCC0Du32), 0x0DCCBB0Au32);
- assert_eq!(bswap64(0x0122334455667708u64), 0x0877665544332201u64);
+ assert_eq!(ctpop8(0), 0);
+ assert_eq!(ctpop16(0), 0);
+ assert_eq!(ctpop32(0), 0);
+ assert_eq!(ctpop64(0), 0);
+
+ assert_eq!(ctpop8(1), 1);
+ assert_eq!(ctpop16(1), 1);
+ assert_eq!(ctpop32(1), 1);
+ assert_eq!(ctpop64(1), 1);
+
+ assert_eq!(ctpop8(10), 2);
+ assert_eq!(ctpop16(10), 2);
+ assert_eq!(ctpop32(10), 2);
+ assert_eq!(ctpop64(10), 2);
+
+ assert_eq!(ctpop8(100), 3);
+ assert_eq!(ctpop16(100), 3);
+ assert_eq!(ctpop32(100), 3);
+ assert_eq!(ctpop64(100), 3);
+
+ assert_eq!(ctpop8(-1), 8);
+ assert_eq!(ctpop16(-1), 16);
+ assert_eq!(ctpop32(-1), 32);
+ assert_eq!(ctpop64(-1), 64);
+
+ assert_eq!(ctlz8(0), 8);
+ assert_eq!(ctlz16(0), 16);
+ assert_eq!(ctlz32(0), 32);
+ assert_eq!(ctlz64(0), 64);
+
+ assert_eq!(ctlz8(1), 7);
+ assert_eq!(ctlz16(1), 15);
+ assert_eq!(ctlz32(1), 31);
+ assert_eq!(ctlz64(1), 63);
+
+ assert_eq!(ctlz8(10), 4);
+ assert_eq!(ctlz16(10), 12);
+ assert_eq!(ctlz32(10), 28);
+ assert_eq!(ctlz64(10), 60);
+
+ assert_eq!(ctlz8(100), 1);
+ assert_eq!(ctlz16(100), 9);
+ assert_eq!(ctlz32(100), 25);
+ assert_eq!(ctlz64(100), 57);
+
+ assert_eq!(cttz8(-1), 0);
+ assert_eq!(cttz16(-1), 0);
+ assert_eq!(cttz32(-1), 0);
+ assert_eq!(cttz64(-1), 0);
+
+ assert_eq!(cttz8(0), 8);
+ assert_eq!(cttz16(0), 16);
+ assert_eq!(cttz32(0), 32);
+ assert_eq!(cttz64(0), 64);
+
+ assert_eq!(cttz8(1), 0);
+ assert_eq!(cttz16(1), 0);
+ assert_eq!(cttz32(1), 0);
+ assert_eq!(cttz64(1), 0);
+
+ assert_eq!(cttz8(10), 1);
+ assert_eq!(cttz16(10), 1);
+ assert_eq!(cttz32(10), 1);
+ assert_eq!(cttz64(10), 1);
+
+ assert_eq!(cttz8(100), 2);
+ assert_eq!(cttz16(100), 2);
+ assert_eq!(cttz32(100), 2);
+ assert_eq!(cttz64(100), 2);
+
+ assert_eq!(cttz8(-1), 0);
+ assert_eq!(cttz16(-1), 0);
+ assert_eq!(cttz32(-1), 0);
+ assert_eq!(cttz64(-1), 0);
+
+ assert_eq!(bswap16(0x0A0B), 0x0B0A);
+ assert_eq!(bswap32(0x0ABBCC0D), 0x0DCCBB0A);
+ assert_eq!(bswap64(0x0122334455667708), 0x0877665544332201);
}
}
assert_approx_eq!(sqrtf32(64f32), 8f32);
assert_approx_eq!(sqrtf64(64f64), 8f64);
- assert_approx_eq!(powif32(25f32, -2i32), 0.0016f32);
- assert_approx_eq!(powif64(23.2f64, 2i32), 538.24f64);
+ assert_approx_eq!(powif32(25f32, -2), 0.0016f32);
+ assert_approx_eq!(powif64(23.2f64, 2), 538.24f64);
assert_approx_eq!(sinf32(0f32), 0f32);
assert_approx_eq!(sinf64(f64::consts::PI / 2f64), 1f64);
pub fn main() {
let x: X<int> = X {
a: 12345678,
- b: 9u8,
+ b: 9,
c: true,
- d: 10u8,
- e: 11u16,
- f: 12u8,
- g: 13u8
+ d: 10,
+ e: 11,
+ f: 12,
+ g: 13
};
bar(x);
}
fn bar<T>(x: X<T>) {
- assert_eq!(x.b, 9u8);
+ assert_eq!(x.b, 9);
assert_eq!(x.c, true);
- assert_eq!(x.d, 10u8);
- assert_eq!(x.e, 11u16);
- assert_eq!(x.f, 12u8);
- assert_eq!(x.g, 13u8);
+ assert_eq!(x.d, 10);
+ assert_eq!(x.e, 11);
+ assert_eq!(x.f, 12);
+ assert_eq!(x.g, 13);
}
fn main() {
// Generate sieve of Eratosthenes for n up to 1e6
- let n = 1000000_usize;
+ let n = 1000000;
let mut sieve = BitVec::from_elem(n+1, true);
let limit: uint = (n as f32).sqrt() as uint;
for i in 2..limit+1 {
use std::thunk::Thunk;
pub fn main() {
- let mut x = 1i32;
+ let mut x = 1;
let _thunk = Thunk::new(move|| { x = 2; });
}
}
fn main() {
- let arr = [(1, 1_usize), (2, 2), (3, 3)];
+ let arr = [(1, 1), (2, 2), (3, 3)];
let v1: Vec<&_> = arr.iter().collect();
let v2: Vec<_> = arr.iter().map(copy).collect();
}
fn match_on_upvar() {
- let mut foo: Option<Box<_>> = Some(box 8i32);
+ let mut foo: Option<Box<_>> = Some(box 8);
let f = move|| {
match foo {
None => {},
use std::iter::AdditiveIterator;
fn main() {
let x: [u64; 3] = [1, 2, 3];
- assert_eq!(6, (0_usize..3).map(|i| x[i]).sum());
+ assert_eq!(6, (0..3).map(|i| x[i]).sum());
}
}
fn main() {
- let m = Mat::new(vec!(1_usize, 2, 3, 4, 5, 6), 3);
+ let m = Mat::new(vec!(1, 2, 3, 4, 5, 6), 3);
let r = m.row(1);
assert!(r.index(&2) == &6);
assert!(r[2] == 6);
- assert!(r[2_usize] == 6_usize);
+ assert!(r[2] == 6);
assert!(6 == r[2]);
let e = r[2];
struct Bar<'a> { m: marker::PhantomData<&'a ()> }
impl<'a> i::Foo<'a, uint> for Bar<'a> {
- fn foo(&self) -> uint { 5_usize }
+ fn foo(&self) -> uint { 5 }
}
pub fn main() {
#[allow(unused_must_use)]
fn main() {
- (0_usize..10).map(uint_to_foo);
+ (0..10).map(uint_to_foo);
}
}
pub fn main() {
- fn box_1() -> Box<[i8; 1]> { Box::new( [1i8; 1] ) }
- fn box_2() -> Box<[i8; 2]> { Box::new( [1i8; 2] ) }
- fn box_3() -> Box<[i8; 3]> { Box::new( [1i8; 3] ) }
- fn box_4() -> Box<[i8; 4]> { Box::new( [1i8; 4] ) }
+ fn box_1() -> Box<[i8; 1]> { Box::new( [1; 1] ) }
+ fn box_2() -> Box<[i8; 2]> { Box::new( [1; 2] ) }
+ fn box_3() -> Box<[i8; 3]> { Box::new( [1; 3] ) }
+ fn box_4() -> Box<[i8; 4]> { Box::new( [1; 4] ) }
foo(box_1, box_2, box_3, box_4);
}
}
pub fn main() {
- fn box_1() -> Box<[i8; 1]> { Box::new( [1i8] ) }
- fn box_2() -> Box<[i8; 20]> { Box::new( [1i8; 20] ) }
- fn box_3() -> Box<[i8; 300]> { Box::new( [1i8; 300] ) }
- fn box_4() -> Box<[i8; 4000]> { Box::new( [1i8; 4000] ) }
+ fn box_1() -> Box<[i8; 1]> { Box::new( [1] ) }
+ fn box_2() -> Box<[i8; 20]> { Box::new( [1; 20] ) }
+ fn box_3() -> Box<[i8; 300]> { Box::new( [1; 300] ) }
+ fn box_4() -> Box<[i8; 4000]> { Box::new( [1; 4000] ) }
foo(box_1, box_2, box_3, box_4);
}
use std::fmt;
fn main() {
- let a: &fmt::Debug = &1_i32;
+ let a: &fmt::Debug = &1;
format!("{:?}", a);
}
use m::{START, END};
fn main() {
- match 42u32 {
+ match 42 {
m::START...m::END => {},
- 0u32...m::END => {},
- m::START...59u32 => {},
+ 0...m::END => {},
+ m::START...59 => {},
_ => {},
}
}
let mut i = lo;
while i < hi {
it(i);
- i += 1_usize;
+ i += 1;
}
}
pub fn main() {
- let range: 'static ||uint|| = |a| range(0_usize, 1000_usize, a);
+ let range: 'static ||uint|| = |a| range(0, 1000, a);
let filt: 'static ||v: uint|| = |a| filter(
range,
- |&&n: uint| n % 3_usize != 0_usize && n % 5_usize != 0_usize,
+ |&&n: uint| n % 3 != 0 && n % 5 != 0,
a);
- let sum = foldl(filt, 0_usize, |accum, &&n: uint| accum + n );
+ let sum = foldl(filt, 0, |accum, &&n: uint| accum + n );
println!("{}", sum);
}
}
fn main() {
- let xs = vec![1u8, 2, 3, 4, 5];
+ let xs = vec![1, 2, 3, 4, 5];
assert_eq!(xs.into_iter().digit_sum(), 15);
}
}
pub fn main() {
- f(C(1_usize));
+ f(C(1));
}
// the position of this function is significant! - if it comes before methods
// then it works, if it comes after it then it doesn't!
fn to_bools(bitv: Storage) -> Vec<bool> {
- (0_usize..8).map(|i| {
+ (0..8).map(|i| {
let w = i / 64;
let b = i % 64;
- let x = 1u64 & (bitv.storage[w] >> b);
- x == 1u64
+ let x = 1 & (bitv.storage[w] >> b);
+ x == 1
}).collect()
}
let bools = vec!(false, false, true, false, false, true, true, false);
let bools2 = to_bools(Storage{storage: vec!(0b01100100)});
- for i in 0_usize..8 {
+ for i in 0..8 {
println!("{} => {} vs {}", i, bools[i], bools2[i]);
}
// FIXME (#22405): Replace `Box::new` with `box` here when/if possible.
let callback: SamplesFn = Box::new(move |buffer| {
- for i in 0_usize..buffer.len() {
+ for i in 0..buffer.len() {
println!("{}: {}", i, buffer[i])
}
});
pub fn main() {
let i: uint = 0;
- assert!(i <= 0xFFFF_FFFF_usize);
+ assert!(i <= 0xFFFF_FFFF);
let i: int = 0;
- assert!(i >= -0x8000_0000__isize);
- assert!(i <= 0x7FFF_FFFF__isize);
+ assert!(i >= -0x8000_0000);
+ assert!(i <= 0x7FFF_FFFF);
}
assert_eq!(unsafe { NUM_DROPS }, 3);
{ let _x = FooBar::_Foo(Foo); }
assert_eq!(unsafe { NUM_DROPS }, 5);
- { let _x = FooBar::_Bar(42_usize); }
+ { let _x = FooBar::_Bar(42); }
assert_eq!(unsafe { NUM_DROPS }, 6);
{ let _ = Foo; }
assert_eq!(unsafe { NUM_DROPS }, 9);
{ let _ = FooBar::_Foo(Foo); }
assert_eq!(unsafe { NUM_DROPS }, 11);
- { let _ = FooBar::_Bar(42_usize); }
+ { let _ = FooBar::_Bar(42); }
assert_eq!(unsafe { NUM_DROPS }, 12);
}
struct signature<'a> { pattern : &'a [u32] }
static test1: signature<'static> = signature {
- pattern: &[0x243f6a88u32,0x85a308d3u32,0x13198a2eu32,0x03707344u32,0xa4093822u32,0x299f31d0u32]
+ pattern: &[0x243f6a88,0x85a308d3,0x13198a2e,0x03707344,0xa4093822,0x299f31d0]
};
pub fn main() {
- let test: &[u32] = &[0x243f6a88u32,0x85a308d3u32,0x13198a2eu32,
- 0x03707344u32,0xa4093822u32,0x299f31d0u32];
+ let test: &[u32] = &[0x243f6a88,0x85a308d3,0x13198a2e,
+ 0x03707344,0xa4093822,0x299f31d0];
println!("{}",test==test1.pattern);
}
struct X { pub x: uint }
impl Default for X {
fn default() -> X {
- X { x: 42_usize }
+ X { x: 42 }
}
}
extern crate issue2170lib;
pub fn main() {
- // let _ = issue2170lib::rsrc(2i32);
+ // let _ = issue2170lib::rsrc(2);
}
fn producer(tx: &Sender<Vec<u8>>) {
tx.send(
- vec!(1u8, 2u8, 3u8, 4u8, 5u8, 6u8, 7u8, 8u8, 9u8, 10u8, 11u8, 12u8,
- 13u8)).unwrap();
+ vec!(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12,
+ 13)).unwrap();
}
pub fn main() {
fn foo() -> Box<FnMut() -> isize + 'static> {
let k: Box<_> = box 22;
let _u = A {a: k.clone()};
- // FIXME(#16640) suffix in `22_isize` suffix shouldn't be necessary
- let result = || 22_isize;
+ // NOTE(#16640): an explicit `isize` suffix on the literal below is no longer necessary
+ let result = || 22;
// FIXME (#22405): Replace `Box::new` with `box` here when/if possible.
Box::new(result)
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags:--cfg ndebug
+// compile-flags:-C debug-assertions=no
// exec-env:RUST_LOG=logging-enabled-debug=debug
#[macro_use]
// ignore-windows
// exec-env:RUST_LOG=debug
+// compile-flags:-C debug-assertions=y
#[macro_use]
extern crate log;
pub fn main() {
assert!(overly_complicated!(f, x, Option<uint>, { return Some(x); },
- Some(8_usize), Some(y), y) == 8_usize)
+ Some(8), Some(y), y) == 8)
}
}
pub fn main() {
- assert_eq!(1_usize, f(Some('x')));
- assert_eq!(2_usize, f(Some('y')));
- assert_eq!(3_usize, f(None));
+ assert_eq!(1, f(Some('x')));
+ assert_eq!(2, f(Some('y')));
+ assert_eq!(3, f(None));
assert_eq!(1, match Some('x') {
Some(char_x!()) => 1,
None => return (),
Some(num) => num as u32
};
- assert_eq!(f, 1234u32);
+ assert_eq!(f, 1234);
println!("{}", f)
}
}
fn main() {
- let mut buf = [0_u8; 6];
+ let mut buf = [0; 6];
{
let mut writer: &mut [_] = &mut buf;
x.foo(&x);
- assert!(method_self_arg1::get_count() == 2u64*3*3*3*5*5*5*7*7*7);
+ assert!(method_self_arg1::get_count() == 2*3*3*3*5*5*5*7*7*7);
}
x.run_trait();
- assert!(method_self_arg2::get_count() == 2u64*2*3*3*5*5*7*7*11*11*13*13*17);
+ assert!(method_self_arg2::get_count() == 2*2*3*3*5*5*7*7*11*11*13*13*17);
}
x.baz();
- unsafe { assert!(COUNT == 2u64*2*3*3*5*5*7*7*11*11*13*13*17); }
+ unsafe { assert!(COUNT == 2*2*3*3*5*5*7*7*11*11*13*13*17); }
}
x.foo(&x);
- unsafe { assert!(COUNT == 2_usize*3*3*3*5*5*5*7*7*7); }
+ unsafe { assert!(COUNT == 2*3*3*3*5*5*5*7*7*7); }
}
check_fancy!($e, $T, |ptr| assert!(*ptr == $e));
}};
($e:expr, $T:ty, |$v:ident| $chk:expr) => {{
- assert!(E::Nothing::<$T>((), ((), ()), [23i8; 0]).is_none());
+ assert!(E::Nothing::<$T>((), ((), ()), [23; 0]).is_none());
let e = $e;
let t_ = E::Thing::<$T>(23, e);
match t_.get_ref() {
}
fn main() {
- let x = 22_i32;
+ let x = 22;
let x1: &SomeTrait<SomeType=i32> = &x;
let y = get_int(x1);
assert_eq!(x, y);
}
pub fn main() {
- let mut x = 22_usize;
+ let mut x = 22;
let obj = &mut x as &mut Foo;
do_it_mut(obj);
- do_it_imm(obj, 23_usize);
+ do_it_imm(obj, 23);
do_it_mut(obj);
}
box BarStruct{ x: 2 } as Box<FooTrait>
);
- for i in 0_usize..foos.len() {
+ for i in 0..foos.len() {
assert_eq!(i, foos[i].foo());
}
}
pub fn main() {
assert_eq!(or_alt(blah::c), 0);
- assert_eq!(or_alt(blah::a(10, 100, 0_usize)), 110);
+ assert_eq!(or_alt(blah::a(10, 100, 0)), 110);
assert_eq!(or_alt(blah::b(20, 200)), 220);
}
fn main() {
// ICE trigger
- (G(PhantomData))(1_i32);
+ (G(PhantomData))(1);
}
assert_eq!(mem::size_of::<[Foo; 10]>(), 90);
- for i in 0_usize..10 {
+ for i in 0..10 {
assert_eq!(foos[i], Foo { bar: 1, baz: 2});
}
pub fn bar(_offset: uint) { }
}
-pub fn main() { foo::bar(0_usize); }
+pub fn main() { foo::bar(0); }
}
pub fn main() {
- let mut nyan : cat = cat(52_usize, 99);
- assert_eq!(nyan.meow_count(), 52_usize);
+ let mut nyan : cat = cat(52, 99);
+ assert_eq!(nyan.meow_count(), 52);
}
#![feature(box_syntax)]
fn sums_to(v: Vec<int> , sum: int) -> bool {
- let mut i = 0_usize;
+ let mut i = 0;
let mut sum0 = 0;
while i < v.len() {
sum0 += v[i];
- i += 1_usize;
+ i += 1;
}
return sum0 == sum;
}
fn sums_to_using_uniq(v: Vec<int> , sum: int) -> bool {
- let mut i = 0_usize;
+ let mut i = 0;
let mut sum0: Box<_> = box 0;
while i < v.len() {
*sum0 += v[i];
- i += 1_usize;
+ i += 1;
}
return *sum0 == sum;
}
fn sums_to_using_rec(v: Vec<int> , sum: int) -> bool {
- let mut i = 0_usize;
+ let mut i = 0;
let mut sum0 = F {f: 0};
while i < v.len() {
sum0.f += v[i];
- i += 1_usize;
+ i += 1;
}
return sum0.f == sum;
}
struct F<T> { f: T }
fn sums_to_using_uniq_rec(v: Vec<int> , sum: int) -> bool {
- let mut i = 0_usize;
+ let mut i = 0;
let mut sum0 = F::<Box<_>> {f: box 0};
while i < v.len() {
*sum0.f += v[i];
- i += 1_usize;
+ i += 1;
}
return *sum0.f == sum;
}
let x = ..1+3;
assert!(x == (..4));
- let a = &[0i32, 1, 2, 3, 4, 5, 6];
+ let a = &[0, 1, 2, 3, 4, 5, 6];
let x = &a[1+1..2+2];
assert!(x == &a[2..4]);
let x = &a[..1+2];
// from pairs of rows (where each pair of rows is equally sized),
// and the elements of the triangle match their row-pair index.
unsafe fn sanity_check(ascend: &[*mut u8]) {
- for i in 0_usize..COUNT / 2 {
+ for i in 0..COUNT / 2 {
let (p0, p1, size) = (ascend[2*i], ascend[2*i+1], idx_to_size(i));
- for j in 0_usize..size {
+ for j in 0..size {
assert_eq!(*p0.offset(j as int), i as u8);
assert_eq!(*p1.offset(j as int), i as u8);
}
// that at least two rows will be allocated near each other, so
// that we trigger the bug (a buffer overrun) in an observable
// way.)
- for i in 0_usize..COUNT / 2 {
+ for i in 0..COUNT / 2 {
let size = idx_to_size(i);
ascend[2*i] = allocate(size, ALIGN);
ascend[2*i+1] = allocate(size, ALIGN);
}
// Initialize each pair of rows to distinct value.
- for i in 0_usize..COUNT / 2 {
+ for i in 0..COUNT / 2 {
let (p0, p1, size) = (ascend[2*i], ascend[2*i+1], idx_to_size(i));
for j in 0..size {
*p0.offset(j as int) = i as u8;
test_3(ascend); // triangle -> square
test_4(ascend); // square -> triangle
- for i in 0_usize..COUNT / 2 {
+ for i in 0..COUNT / 2 {
let size = idx_to_size(i);
deallocate(ascend[2*i], size, ALIGN);
deallocate(ascend[2*i+1], size, ALIGN);
// rows as we go.
unsafe fn test_1(ascend: &mut [*mut u8]) {
let new_size = idx_to_size(COUNT-1);
- for i in 0_usize..COUNT / 2 {
+ for i in 0..COUNT / 2 {
let (p0, p1, old_size) = (ascend[2*i], ascend[2*i+1], idx_to_size(i));
assert!(old_size < new_size);
// Test 2: turn the square back into a triangle, top to bottom.
unsafe fn test_2(ascend: &mut [*mut u8]) {
let old_size = idx_to_size(COUNT-1);
- for i in 0_usize..COUNT / 2 {
+ for i in 0..COUNT / 2 {
let (p0, p1, new_size) = (ascend[2*i], ascend[2*i+1], idx_to_size(i));
assert!(new_size < old_size);
// Test 3: turn triangle into a square, bottom to top.
unsafe fn test_3(ascend: &mut [*mut u8]) {
let new_size = idx_to_size(COUNT-1);
- for i in (0_usize..COUNT / 2).rev() {
+ for i in (0..COUNT / 2).rev() {
let (p0, p1, old_size) = (ascend[2*i], ascend[2*i+1], idx_to_size(i));
assert!(old_size < new_size);
// Test 4: turn the square back into a triangle, bottom to top.
unsafe fn test_4(ascend: &mut [*mut u8]) {
let old_size = idx_to_size(COUNT-1);
- for i in (0_usize..COUNT / 2).rev() {
+ for i in (0..COUNT / 2).rev() {
let (p0, p1, new_size) = (ascend[2*i], ascend[2*i+1], idx_to_size(i));
assert!(new_size < old_size);
#[cfg(any(target_arch = "x86", target_arch = "arm", target_arch = "aarch64"))]
mod m {
- pub fn align() -> uint { 4_usize }
- pub fn size() -> uint { 8_usize }
+ pub fn align() -> uint { 4 }
+ pub fn size() -> uint { 8 }
}
#[cfg(target_arch = "x86_64")]
mod m {
- pub fn align() -> uint { 4_usize }
- pub fn size() -> uint { 8_usize }
+ pub fn align() -> uint { 4 }
+ pub fn size() -> uint { 8 }
}
pub fn main() {
unsafe {
- let x = Outer {c8: 22u8, t: Inner {c64: 44u32}};
+ let x = Outer {c8: 22, t: Inner {c64: 44}};
// Send it through the shape code
let y = format!("{:?}", x);
mod m {
#[cfg(target_arch = "x86")]
pub mod m {
- pub fn align() -> uint { 4_usize }
- pub fn size() -> uint { 12_usize }
+ pub fn align() -> uint { 4 }
+ pub fn size() -> uint { 12 }
}
#[cfg(any(target_arch = "x86_64", target_arch = "arm", target_arch = "aarch64"))]
pub mod m {
- pub fn align() -> uint { 8_usize }
- pub fn size() -> uint { 16_usize }
+ pub fn align() -> uint { 8 }
+ pub fn size() -> uint { 16 }
}
}
mod m {
#[cfg(target_arch = "x86_64")]
pub mod m {
- pub fn align() -> uint { 8u }
- pub fn size() -> uint { 16u }
+ pub fn align() -> uint { 8 }
+ pub fn size() -> uint { 16 }
}
}
mod m {
#[cfg(target_arch = "x86")]
pub mod m {
- pub fn align() -> uint { 8_usize }
- pub fn size() -> uint { 16_usize }
+ pub fn align() -> uint { 8 }
+ pub fn size() -> uint { 16 }
}
#[cfg(target_arch = "x86_64")]
pub mod m {
- pub fn align() -> uint { 8_usize }
- pub fn size() -> uint { 16_usize }
+ pub fn align() -> uint { 8 }
+ pub fn size() -> uint { 16 }
}
}
mod m {
#[cfg(any(target_arch = "arm", target_arch = "aarch64"))]
pub mod m {
- pub fn align() -> uint { 8_usize }
- pub fn size() -> uint { 16_usize }
+ pub fn align() -> uint { 8 }
+ pub fn size() -> uint { 16 }
}
}
pub fn main() {
unsafe {
- let x = Outer {c8: 22u8, t: Inner {c64: 44u64}};
+ let x = Outer {c8: 22, t: Inner {c64: 44}};
let y = format!("{:?}", x);
}
pub fn main() {
- assert_eq!(m(t3::c(T2 {x: t1::a(10), y: 5}, 4_usize)), 10);
- assert_eq!(m(t3::c(T2 {x: t1::b(10_usize), y: 5}, 4_usize)), 19);
+ assert_eq!(m(t3::c(T2 {x: t1::a(10), y: 5}, 4)), 10);
+ assert_eq!(m(t3::c(T2 {x: t1::b(10), y: 5}, 4)), 19);
}
}
pub fn main() {
- let p: Box<_> = box 22_usize;
+ let p: Box<_> = box 22;
let r = foo(&*p);
println!("r={}", r);
- assert_eq!(r, 22_usize);
+ assert_eq!(r, 22);
}
}
pub fn main() {
- let p: Box<_> = box 3_usize;
+ let p: Box<_> = box 3;
let r = foo(&*p);
- assert_eq!(r, 3_usize);
+ assert_eq!(r, 3);
}
}
pub fn main() {
- let mut i = 3i32;
+ let mut i = 3;
assert_eq!(i, 3);
{
let cl = || i += 1;
fn bar(x: &uint) -> uint { *x }
pub fn main() {
- let p: Box<_> = box 3_usize;
+ let p: Box<_> = box 3;
assert_eq!(bar(foo(&*p)), 3);
}
}
pub fn main() {
- let x = 3_usize;
- assert_eq!(parameterized(&x), 3_usize);
+ let x = 3;
+ assert_eq!(parameterized(&x), 3);
}
// This version does not yet work (associated type issues)...
#[cfg(cannot_use_this_yet)]
fn foo<'a>(map: RefCell<HashMap<&'static str, &'a [u8]>>) {
- let one = [1_usize];
+ let one = [1];
assert_eq!(map.borrow().get("one"), Some(&one[..]));
}
// ... and this version does not work (the lifetime of `one` is
// supposed to match the lifetime `'a`) ...
fn foo<'a>(map: RefCell<HashMap<&'static str, &'a [u8]>>) {
- let one = [1_usize];
+ let one = [1];
assert_eq!(map.borrow().get("one"), Some(&one.as_slice()));
}
}
fn main() {
- let zer = [0u8];
- let one = [1u8];
- let two = [2u8];
+ let zer = [0];
+ let one = [1];
+ let two = [2];
let mut map = HashMap::new();
map.insert("zero", &zer[..]);
map.insert("one", &one[..]);
}
fn main() {
- let w = E { f: &10u8 };
+ let w = E { f: &10 };
let o = extension(&w);
- assert_eq!(o.n(), 10u8);
+ assert_eq!(o.n(), 10);
}
let fromp = CString::new(test_file.as_vec()).unwrap();
let modebuf = CString::new(b"w+b").unwrap();
let ostream = libc::fopen(fromp.as_ptr(), modebuf.as_ptr());
- assert!((ostream as uint != 0_usize));
+ assert!((ostream as uint != 0));
let s = "hello".to_string();
let buf = CString::new(b"hello").unwrap();
let write_len = libc::fwrite(buf.as_ptr() as *mut _,
fn my_err(s: String) -> ! { println!("{}", s); panic!(); }
fn okay(i: uint) -> int {
- if i == 3_usize {
+ if i == 3 {
my_err("I don't like three".to_string());
} else {
return 42;
}
}
-pub fn main() { okay(4_usize); }
+pub fn main() { okay(4); }
};
let me = &*args[0];
- let x: &[u8] = &[1u8];
+ let x: &[u8] = &[1];
pass(Command::new(me).arg(x).output().unwrap());
- let x: &[u8] = &[2u8];
+ let x: &[u8] = &[2];
pass(Command::new(me).arg(x).output().unwrap());
- let x: &[u8] = &[3u8];
+ let x: &[u8] = &[3];
pass(Command::new(me).arg(x).output().unwrap());
- let x: &[u8] = &[4u8];
+ let x: &[u8] = &[4];
pass(Command::new(me).arg(x).output().unwrap());
- let x: &[u8] = &[5u8];
+ let x: &[u8] = &[5];
pass(Command::new(me).arg(x).output().unwrap());
0
fn test<F>(f: F) -> uint where F: FnOnce(uint) -> uint {
- return f(22_usize);
+ return f(22);
}
pub fn main() {
- let y = test(|x| 4_usize * x);
- assert_eq!(y, 88_usize);
+ let y = test(|x| 4 * x);
+ assert_eq!(y, 88);
}
}
fn foo(p: &Panolpy) {
- assert_eq!(22_i32 >> p.i8, 11_i32);
- assert_eq!(22_i32 >> p.i16, 11_i32);
- assert_eq!(22_i32 >> p.i32, 11_i32);
- assert_eq!(22_i32 >> p.i64, 11_i32);
- assert_eq!(22_i32 >> p.isize, 11_i32);
+ assert_eq!(22 >> p.i8, 11);
+ assert_eq!(22 >> p.i16, 11);
+ assert_eq!(22 >> p.i32, 11);
+ assert_eq!(22 >> p.i64, 11);
+ assert_eq!(22 >> p.isize, 11);
- assert_eq!(22_i32 >> p.u8, 11_i32);
- assert_eq!(22_i32 >> p.u16, 11_i32);
- assert_eq!(22_i32 >> p.u32, 11_i32);
- assert_eq!(22_i32 >> p.u64, 11_i32);
- assert_eq!(22_i32 >> p.usize, 11_i32);
+ assert_eq!(22 >> p.u8, 11);
+ assert_eq!(22 >> p.u16, 11);
+ assert_eq!(22 >> p.u32, 11);
+ assert_eq!(22 >> p.u64, 11);
+ assert_eq!(22 >> p.usize, 11);
}
fn main() {
pub fn main() {
check!(Option<u8>, 2,
None, "None",
- Some(129u8), "Some(129)");
+ Some(129), "Some(129)");
check!(Option<i16>, 4,
None, "None",
- Some(-20000i16), "Some(-20000)");
+ Some(-20000), "Some(-20000)");
check!(Either<u8, i8>, 2,
- Either::Left(132u8), "Left(132)",
- Either::Right(-32i8), "Right(-32)");
+ Either::Left(132), "Left(132)",
+ Either::Right(-32), "Right(-32)");
check!(Either<u8, i16>, 4,
- Either::Left(132u8), "Left(132)",
- Either::Right(-20000i16), "Right(-20000)");
+ Either::Left(132), "Left(132)",
+ Either::Right(-20000), "Right(-20000)");
}
impl Foo for uint {
fn foo() -> uint {
- 5_usize
+ 5
}
}
}
// Make sure we properly handle repeated self-appends.
let mut a: String = "A".to_string();
let mut i = 20;
- let mut expected_len = 1_usize;
+ let mut expected_len = 1;
while i > 0 {
println!("{}", a.len());
assert_eq!(a.len(), expected_len);
a = format!("{}{}", a, a);
i -= 1;
- expected_len *= 2_usize;
+ expected_len *= 2;
}
}
fn test1() {
unsafe {
- let q = Quad { a: 0xaaaa_aaaa_aaaa_aaaa_u64,
- b: 0xbbbb_bbbb_bbbb_bbbb_u64,
- c: 0xcccc_cccc_cccc_cccc_u64,
- d: 0xdddd_dddd_dddd_dddd_u64 };
+ let q = Quad { a: 0xaaaa_aaaa_aaaa_aaaa,
+ b: 0xbbbb_bbbb_bbbb_bbbb,
+ c: 0xcccc_cccc_cccc_cccc,
+ d: 0xdddd_dddd_dddd_dddd };
let qq = rustrt::rust_dbg_abi_1(q);
println!("a: {:x}", qq.a as uint);
println!("b: {:x}", qq.b as uint);
println!("c: {:x}", qq.c as uint);
println!("d: {:x}", qq.d as uint);
- assert_eq!(qq.a, q.c + 1u64);
- assert_eq!(qq.b, q.d - 1u64);
- assert_eq!(qq.c, q.a + 1u64);
- assert_eq!(qq.d, q.b - 1u64);
+ assert_eq!(qq.a, q.c + 1);
+ assert_eq!(qq.b, q.d - 1);
+ assert_eq!(qq.c, q.a + 1);
+ assert_eq!(qq.d, q.b - 1);
}
}
fn test2() {
unsafe {
let f = Floats { a: 1.234567890e-15_f64,
- b: 0b_1010_1010_u8,
+ b: 0b_1010_1010,
c: 1.0987654321e-15_f64 };
let ff = rustrt::rust_dbg_abi_2(f);
println!("a: {}", ff.a as f64);
println!("b: {}", ff.b as uint);
println!("c: {}", ff.c as f64);
assert_eq!(ff.a, f.c + 1.0f64);
- assert_eq!(ff.b, 0xff_u8);
+ assert_eq!(ff.b, 0xff);
assert_eq!(ff.c, f.a - 1.0f64);
}
}
pub fn main() {
assert_eq!(line!(), 25);
- assert!((column!() == 4u32));
+ assert!((column!() == 4));
assert_eq!(indirect_line!(), 27);
assert!((file!().ends_with("syntax-extension-source-utils.rs")));
assert_eq!(stringify!((2*3) + 5).to_string(), "( 2 * 3 ) + 5".to_string());
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// ignore-linux #7340 fails on 32-bit Linux
-// ignore-macos #7340 fails on 32-bit macos
-
use std::mem;
enum Tag<A> {
}
fn mk_rec() -> Rec {
- return Rec { c8:0u8, t:Tag::Tag2(0u64) };
+ return Rec { c8:0, t:Tag::Tag2(0) };
}
-fn is_8_byte_aligned(u: &Tag<u64>) -> bool {
+fn is_u64_aligned(u: &Tag<u64>) -> bool {
let p: uint = unsafe { mem::transmute(u) };
- return (p & 7_usize) == 0_usize;
+ let u64_align = std::mem::min_align_of::<u64>();
+ return (p & (u64_align - 1)) == 0;
}
pub fn main() {
let x = mk_rec();
- assert!(is_8_byte_aligned(&x.t));
+ assert!(is_u64_aligned(&x.t));
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// ignore-linux #7340 fails on 32-bit Linux
-// ignore-macos #7340 fails on 32-bit macos
-
use std::mem;
enum Tag<A,B> {
}
fn mk_rec<A,B>(a: A, b: B) -> Rec<A,B> {
- Rec { chA:0u8, tA:Tag::VarA(a), chB:1u8, tB:Tag::VarB(b) }
+ Rec { chA:0, tA:Tag::VarA(a), chB:1, tB:Tag::VarB(b) }
}
fn is_aligned<A>(amnt: uint, u: &A) -> bool {
let p: uint = unsafe { mem::transmute(u) };
- return (p & (amnt-1_usize)) == 0_usize;
+ return (p & (amnt-1)) == 0;
}
fn variant_data_is_aligned<A,B>(amnt: uint, u: &Tag<A,B>) -> bool {
}
pub fn main() {
+ let u64_align = std::mem::min_align_of::<u64>();
let x = mk_rec(22u64, 23u64);
- assert!(is_aligned(8_usize, &x.tA));
- assert!(variant_data_is_aligned(8_usize, &x.tA));
- assert!(is_aligned(8_usize, &x.tB));
- assert!(variant_data_is_aligned(8_usize, &x.tB));
+ assert!(is_aligned(u64_align, &x.tA));
+ assert!(variant_data_is_aligned(u64_align, &x.tA));
+ assert!(is_aligned(u64_align, &x.tB));
+ assert!(variant_data_is_aligned(u64_align, &x.tB));
let x = mk_rec(22u64, 23u32);
- assert!(is_aligned(8_usize, &x.tA));
- assert!(variant_data_is_aligned(8_usize, &x.tA));
- assert!(is_aligned(8_usize, &x.tB));
- assert!(variant_data_is_aligned(4_usize, &x.tB));
+ assert!(is_aligned(u64_align, &x.tA));
+ assert!(variant_data_is_aligned(u64_align, &x.tA));
+ assert!(is_aligned(u64_align, &x.tB));
+ assert!(variant_data_is_aligned(4, &x.tB));
let x = mk_rec(22u32, 23u64);
- assert!(is_aligned(8_usize, &x.tA));
- assert!(variant_data_is_aligned(4_usize, &x.tA));
- assert!(is_aligned(8_usize, &x.tB));
- assert!(variant_data_is_aligned(8_usize, &x.tB));
+ assert!(is_aligned(u64_align, &x.tA));
+ assert!(variant_data_is_aligned(4, &x.tA));
+ assert!(is_aligned(u64_align, &x.tB));
+ assert!(variant_data_is_aligned(u64_align, &x.tB));
let x = mk_rec(22u32, 23u32);
- assert!(is_aligned(4_usize, &x.tA));
- assert!(variant_data_is_aligned(4_usize, &x.tA));
- assert!(is_aligned(4_usize, &x.tB));
- assert!(variant_data_is_aligned(4_usize, &x.tB));
+ assert!(is_aligned(4, &x.tA));
+ assert!(variant_data_is_aligned(4, &x.tA));
+ assert!(is_aligned(4, &x.tB));
+ assert!(variant_data_is_aligned(4, &x.tB));
let x = mk_rec(22f64, 23f64);
- assert!(is_aligned(8_usize, &x.tA));
- assert!(variant_data_is_aligned(8_usize, &x.tA));
- assert!(is_aligned(8_usize, &x.tB));
- assert!(variant_data_is_aligned(8_usize, &x.tB));
+ assert!(is_aligned(u64_align, &x.tA));
+ assert!(variant_data_is_aligned(u64_align, &x.tA));
+ assert!(is_aligned(u64_align, &x.tB));
+ assert!(variant_data_is_aligned(u64_align, &x.tB));
}
}
pub fn main() {
- let x = t_rec {c8: 22u8, t: a_tag::a_tag_var(44u64)};
+ let x = t_rec {c8: 22, t: a_tag::a_tag_var(44)};
let y = format!("{:?}", x);
println!("y = {:?}", y);
assert_eq!(y, "t_rec { c8: 22, t: a_tag_var(44) }".to_string());
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// ignore-linux #7340 fails on 32-bit Linux
-// ignore-macos #7340 fails on 32-bit macos
-
use std::mem;
enum Tag {
}
fn mk_rec() -> Rec {
- return Rec { c8:0u8, t:Tag::TagInner(0u64) };
+ return Rec { c8:0, t:Tag::TagInner(0) };
}
-fn is_8_byte_aligned(u: &Tag) -> bool {
+fn is_u64_aligned(u: &Tag) -> bool {
let p: uint = unsafe { mem::transmute(u) };
- return (p & 7_usize) == 0_usize;
+ let u64_align = std::mem::min_align_of::<u64>();
+ return (p & (u64_align - 1)) == 0;
}
pub fn main() {
let x = mk_rec();
- assert!(is_8_byte_aligned(&x.t));
+ assert!(is_u64_aligned(&x.t));
}
struct R {val0: int, val1: u8, val2: char}
let (tx, rx) = channel();
- let r0: R = R {val0: 0, val1: 1u8, val2: '2'};
+ let r0: R = R {val0: 0, val1: 1, val2: '2'};
tx.send(r0).unwrap();
let mut r1: R;
r1 = rx.recv().unwrap();
assert_eq!(r1.val0, 0);
- assert_eq!(r1.val1, 1u8);
+ assert_eq!(r1.val1, 1);
assert_eq!(r1.val2, '2');
}
let (tx, rx) = channel();
tx.send(t::tag1).unwrap();
tx.send(t::tag2(10)).unwrap();
- tx.send(t::tag3(10, 11u8, 'A')).unwrap();
+ tx.send(t::tag3(10, 11, 'A')).unwrap();
let mut t1: t;
t1 = rx.recv().unwrap();
assert_eq!(t1, t::tag1);
t1 = rx.recv().unwrap();
assert_eq!(t1, t::tag2(10));
t1 = rx.recv().unwrap();
- assert_eq!(t1, t::tag3(10, 11u8, 'A'));
+ assert_eq!(t1, t::tag3(10, 11, 'A'));
}
fn test_chan() {
let addr = rx.recv().unwrap();
let (tx, rx) = channel();
- for _ in 0_usize..1000 {
+ for _ in 0..1000 {
let tx = tx.clone();
Builder::new().stack_size(64 * 1024).spawn(move|| {
match TcpStream::connect(addr) {
// Wait for all clients to exit, but don't wait for the server to exit. The
// server just runs infinitely.
drop(tx);
- for _ in 0_usize..1000 {
+ for _ in 0..1000 {
rx.recv().unwrap();
}
unsafe { libc::exit(0) }
}
pub fn main () {
- assert_eq!(f::<f64, int>(0, 2_usize), 2_usize);
- assert_eq!(f::<uint, int>(0, 2_usize), 2_usize);
+ assert_eq!(f::<f64, int>(0, 2), 2);
+ assert_eq!(f::<uint, int>(0, 2), 2);
}
pub fn main() {
let a = box() () as Box<Trait<u8, u8>>;
- assert_eq!(a.method(Type::Constant((1u8, 2u8))), 0);
+ assert_eq!(a.method(Type::Constant((1, 2))), 0);
}
}
fn main() {
- let w = E { f: &10u8 };
+ let w = E { f: &10 };
let o = extension(&w);
- assert_eq!(o.n(), 10u8);
+ assert_eq!(o.n(), 10);
}
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Test case where an associated type is referenced from within the
+// supertrait definition. Issue #20220.
+
+use std::vec::IntoIter;
+
+// The supertrait bound mentions `<Self as Foo>::Key`, i.e. an
+// associated type of the very trait being defined — this is the
+// construction that used to fail.
+pub trait Foo: Iterator<Item=<Self as Foo>::Key> {
+    type Key;
+}
+
+// `IntoIter<i32>` yields `i32`, so `Key = i32` satisfies the
+// `Iterator<Item = Self::Key>` supertrait bound.
+impl Foo for IntoIter<i32> {
+    type Key = i32;
+}
+
+// Consumes the iterator through the `Foo` bound; `fold` comes from
+// the `Iterator` supertrait.
+fn sum_foo<F:Foo<Key=i32>>(f: F) -> i32 {
+    f.fold(0, |a,b| a + b)
+}
+
+fn main() {
+    // 11 + 10 + 1 == 22
+    let x = sum_foo(vec![11, 10, 1].into_iter());
+    assert_eq!(x, 22);
+}
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Test stack overflow triggered by evaluating the implications. To be
+// WF, the type `Receipt<Complete>` would require that `<Complete as
+// Async>::Cancel` be WF. This normalizes to `Receipt<Complete>`
+// again, leading to an infinite cycle. Issue #23003.
+
+#![allow(dead_code)]
+#![allow(unused_variables)]
+
+use std::marker::PhantomData;
+
+trait Async {
+    type Cancel;
+}
+
+// `PhantomData<A>` keeps the `A:Async` bound in play without storing
+// an actual `A` value.
+struct Receipt<A:Async> {
+    marker: PhantomData<A>,
+}
+
+struct Complete {
+    core: Option<()>,
+}
+
+// The cycle: `Cancel` normalizes back to `Receipt<Complete>`.
+impl Async for Complete {
+    type Cancel = Receipt<Complete>,
+}
+
+// Merely mentioning `Receipt<Complete>` in a signature forces the
+// well-formedness check that used to recurse infinitely.
+fn foo(r: Receipt<Complete>) { }
+
+fn main() { }
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Test a case of a trait which extends the same supertrait twice, but
+// with difference type parameters. Test that we can invoke the
+// various methods in various ways successfully.
+// See also `compile-fail/trait-repeated-supertrait-ambig.rs`.
+
+trait CompareTo<T> {
+    fn same_as(&self, t: T) -> bool;
+}
+
+// Extends `CompareTo` twice with distinct type parameters; both
+// instantiations must be usable through this trait.
+trait CompareToInts : CompareTo<i64> + CompareTo<u64> {
+}
+
+impl CompareTo<i64> for i64 {
+    fn same_as(&self, t: i64) -> bool { *self == t }
+}
+
+impl CompareTo<u64> for i64 {
+    fn same_as(&self, t: u64) -> bool { *self == (t as i64) }
+}
+
+impl CompareToInts for i64 { }
+
+// Dynamic dispatch through a `CompareToInts` trait object; the
+// argument type (i64 vs u64) selects the supertrait instantiation.
+fn with_obj(c: &CompareToInts) -> bool {
+    c.same_as(22_i64) && c.same_as(22_u64)
+}
+
+// Static dispatch via a generic bound, method-call syntax.
+fn with_trait<C:CompareToInts>(c: &C) -> bool {
+    c.same_as(22_i64) && c.same_as(22_u64)
+}
+
+// UFCS naming the subtrait `CompareToInts`.
+fn with_ufcs1<C:CompareToInts>(c: &C) -> bool {
+    CompareToInts::same_as(c, 22_i64) && CompareToInts::same_as(c, 22_u64)
+}
+
+// UFCS naming the supertrait `CompareTo` directly.
+fn with_ufcs2<C:CompareToInts>(c: &C) -> bool {
+    CompareTo::same_as(c, 22_i64) && CompareTo::same_as(c, 22_u64)
+}
+
+fn main() {
+    assert_eq!(22_i64.same_as(22_i64), true);
+    assert_eq!(22_i64.same_as(22_u64), true);
+    assert_eq!(with_trait(&22), true);
+    assert_eq!(with_obj(&22), true);
+    assert_eq!(with_ufcs1(&22), true);
+    assert_eq!(with_ufcs2(&22), true);
+}
fn range_<F>(lo: uint, hi: uint, mut it: F) where F: FnMut(uint) {
let mut lo_ = lo;
- while lo_ < hi { it(lo_); lo_ += 1_usize; }
+ while lo_ < hi { it(lo_); lo_ += 1; }
}
fn create_index<T>(_index: Vec<S<T>> , _hash_fn: extern fn(T) -> uint) {
- range_(0_usize, 256_usize, |_i| {
+ range_(0, 256, |_i| {
let _bucket: Vec<T> = Vec::new();
})
}
pub fn main() {
- let x: Vec<_> = (0_usize..5).collect();
+ let x: Vec<_> = (0..5).collect();
let expected: &[uint] = &[0,1,2,3,4];
assert_eq!(x, expected);
- let x = (0_usize..5).collect::<Vec<_>>();
+ let x = (0..5).collect::<Vec<_>>();
assert_eq!(x, expected);
let y: _ = "hello";
assert_eq!(y.len(), 5);
- let ptr = &5_usize;
+ let ptr = &5;
let ptr2 = ptr as *const _;
assert_eq!(ptr as *const uint as uint, ptr2 as uint);
pub fn main() {
- let mut word: u32 = 200000u32;
- word = word - 1u32;
- assert_eq!(word, 199999u32);
+ let mut word: u32 = 200000;
+ word = word - 1;
+ assert_eq!(word, 199999);
}
// These constants were chosen because they aren't used anywhere
// in the rest of the generated code so they're easily grep-able.
pub fn main() {
- let mut x: u8 = 19u8; // 0x13
+ let mut x: u8 = 19; // 0x13
- let mut y: u8 = 35u8; // 0x23
+ let mut y: u8 = 35; // 0x23
- x = x + 7u8; // 0x7
+ x = x + 7; // 0x7
- y = y - 9u8; // 0x9
+ y = y - 9; // 0x9
assert_eq!(x, y);
}
pub fn main() {
- let mut x: u8 = 12u8;
- let y: u8 = 12u8;
- x = x + 1u8;
- x = x - 1u8;
+ let mut x: u8 = 12;
+ let y: u8 = 12;
+ x = x + 1;
+ x = x - 1;
assert_eq!(x, y);
- // x = 14u8;
- // x = x + 1u8;
+ // x = 14;
+ // x = x + 1;
}
}
fn main() {
- let a: &Foo = &22_i32;
+ let a: &Foo = &22;
assert_eq!(Foo::test(a), 22);
}
//
// error: internal compiler error: get_unique_type_id_of_type() -
// unexpected type: closure,
-// ty_closure(syntax::ast::DefId{krate: 0u32, node: 66u32},
-// ReScope(63u32))
+// ty_closure(syntax::ast::DefId{krate: 0, node: 66},
+// ReScope(63))
//
// This is a regression test for issue #17021.
//
}
pub fn main() {
- let mut a = 7_usize;
+ let mut a = 7;
let b = &mut a;
replace_map(b, |x: uint| x * 2);
- assert_eq!(*b, 14_usize);
+ assert_eq!(*b, 14);
}
enum bar { u(Box<Foo>), w(int), }
pub fn main() {
- assert!(match bar::u(box Foo{a: 10, b: 40_usize}) {
+ assert!(match bar::u(box Foo{a: 10, b: 40}) {
bar::u(box Foo{a: a, b: b}) => { a + (b as int) }
_ => { 66 }
} == 50);
pub fn main() {
let (tx, rx) = channel();
- let n = 100_usize;
- let mut expected = 0_usize;
- let _t = (0_usize..n).map(|i| {
+ let n = 100;
+ let mut expected = 0;
+ let _t = (0..n).map(|i| {
expected += i;
let tx = tx.clone();
thread::spawn(move|| {
})
}).collect::<Vec<_>>();
- let mut actual = 0_usize;
- for _ in 0_usize..n {
+ let mut actual = 0;
+ for _ in 0..n {
let j = rx.recv().unwrap();
actual += *j;
}
f: [T; 3]
}
- let data: Box<_> = box Foo_{f: [1i32, 2, 3] };
+ let data: Box<Foo_<i32>> = box Foo_{f: [1, 2, 3] };
let x: &Foo<i32> = mem::transmute(raw::Slice { len: 3, data: &*data });
assert!(x.f.len() == 3);
assert!(x.f[0] == 1);
- assert!(x.f[1] == 2);
- assert!(x.f[2] == 3);
struct Baz_ {
f1: uint,
let s: String = chs.iter().cloned().collect();
let schs: Vec<char> = s.chars().collect();
- assert!(s.len() == 10_usize);
- assert!(s.chars().count() == 4_usize);
- assert!(schs.len() == 4_usize);
+ assert!(s.len() == 10);
+ assert!(s.chars().count() == 4);
+ assert!(schs.len() == 4);
assert!(schs.iter().cloned().collect::<String>() == s);
- assert!(s.char_at(0_usize) == 'e');
- assert!(s.char_at(1_usize) == 'é');
+ assert!(s.char_at(0) == 'e');
+ assert!(s.char_at(1) == 'é');
assert!((str::from_utf8(s.as_bytes()).is_ok()));
// invalid prefix
- assert!((!str::from_utf8(&[0x80_u8]).is_ok()));
+ assert!((!str::from_utf8(&[0x80]).is_ok()));
// invalid 2 byte prefix
- assert!((!str::from_utf8(&[0xc0_u8]).is_ok()));
- assert!((!str::from_utf8(&[0xc0_u8, 0x10_u8]).is_ok()));
+ assert!((!str::from_utf8(&[0xc0]).is_ok()));
+ assert!((!str::from_utf8(&[0xc0, 0x10]).is_ok()));
// invalid 3 byte prefix
- assert!((!str::from_utf8(&[0xe0_u8]).is_ok()));
- assert!((!str::from_utf8(&[0xe0_u8, 0x10_u8]).is_ok()));
- assert!((!str::from_utf8(&[0xe0_u8, 0xff_u8, 0x10_u8]).is_ok()));
+ assert!((!str::from_utf8(&[0xe0]).is_ok()));
+ assert!((!str::from_utf8(&[0xe0, 0x10]).is_ok()));
+ assert!((!str::from_utf8(&[0xe0, 0xff, 0x10]).is_ok()));
// invalid 4 byte prefix
- assert!((!str::from_utf8(&[0xf0_u8]).is_ok()));
- assert!((!str::from_utf8(&[0xf0_u8, 0x10_u8]).is_ok()));
- assert!((!str::from_utf8(&[0xf0_u8, 0xff_u8, 0x10_u8]).is_ok()));
- assert!((!str::from_utf8(&[0xf0_u8, 0xff_u8, 0xff_u8, 0x10_u8]).is_ok()));
+ assert!((!str::from_utf8(&[0xf0]).is_ok()));
+ assert!((!str::from_utf8(&[0xf0, 0x10]).is_ok()));
+ assert!((!str::from_utf8(&[0xf0, 0xff, 0x10]).is_ok()));
+ assert!((!str::from_utf8(&[0xf0, 0xff, 0xff, 0x10]).is_ok()));
}
assert_eq!(x[2], 3);
assert_eq!(x[3], 4);
- assert_eq!(size_of::<[u8; 4]>(), 4_usize);
+ assert_eq!(size_of::<[u8; 4]>(), 4);
// FIXME #10183
// FIXME #18069
//if cfg!(target_pointer_width = "64") {
- // assert_eq!(size_of::<[u8; (1 << 32)]>(), (1_usize << 32));
+ // assert_eq!(size_of::<[u8; (1 << 32)]>(), (1 << 32));
//}
}
fn p() -> bool { true }
let _a = (assert!((true)) == (assert!(p())));
let _c = (assert!((p())) == ());
- let _b: bool = (println!("{}", 0) == (return 0_usize));
+ let _b: bool = (println!("{}", 0) == (return 0));
}
fn angrydome() {
}
fn main() {
- let x = 42u32;
+ let x = 42;
foo1(&x);
foo2(&x);
unsafe {
#[cfg(windows)]
pub fn main() {
unsafe {
- let expected = 1234_usize;
+ let expected = 1234;
kernel32::SetLastError(expected);
let actual = kernel32::GetLastError();
println!("actual = {}", actual);
#[cfg(windows)]
pub fn main() {
let heap = unsafe { kernel32::GetProcessHeap() };
- let mem = unsafe { kernel32::HeapAlloc(heap, 0u32, 100u32) };
- assert!(mem != 0_usize);
- let res = unsafe { kernel32::HeapFree(heap, 0u32, mem) };
- assert!(res != 0u8);
+ let mem = unsafe { kernel32::HeapAlloc(heap, 0, 100) };
+ assert!(mem != 0);
+ let res = unsafe { kernel32::HeapFree(heap, 0, mem) };
+ assert!(res != 0);
}
#[cfg(not(windows))]