Longer version:
-The Rust Project is copyright 2014, The Rust Project
+The Rust Project is copyright 2015, The Rust Project
Developers (given in the file AUTHORS.txt).
Licensed under the Apache License, Version 2.0
1. Make sure you have installed the dependencies:
* `g++` 4.7 or `clang++` 3.x
* `python` 2.6 or later (but not 3.x)
- * `perl` 5.0 or later
* GNU `make` 3.81 or later
* `curl`
* `git`
opt libcpp 1 "build with llvm with libc++ instead of libstdc++ when using clang"
opt llvm-assertions 1 "build LLVM with assertions"
opt debug 1 "build with extra debug fun"
-opt ratchet-bench 0 "ratchet benchmarks"
opt fast-make 0 "use .gitmodules as timestamp for submodule deps"
opt ccache 0 "invoke gcc/clang via ccache to reuse object files between builds"
opt local-rust 0 "use an installed rustc rather than downloading a snapshot"
step_msg "looking for build programs"
-probe_need CFG_PERL perl
probe_need CFG_CURLORWGET curl wget
probe_need CFG_PYTHON python2.7 python2.6 python2 python
done
# Munge any paths that appear in config.mk back to posix-y
-perl -i.bak -p -e 's@ ([a-zA-Z]):[/\\]@ /\1/@go;' config.tmp
+sed -i.bak -e 's@ \([a-zA-Z]\):[/\\]@ /\1/@g;' config.tmp
rm -f config.tmp.bak
msg
--plugin-path <val>
directory to load plugins from (default: /tmp/rustdoc_ng/plugins)
.TP
+--target <val>
+target triple to document
+.TP
+--crate-name <val>
+specify the name of this crate
+.TP
-L --library-path <val>
directory to add to crate search path
.TP
+--cfg <val>
+pass a --cfg to rustc
+.TP
+--extern <val>
+pass an --extern to rustc
+.TP
+--test
+run code examples as tests
+.TP
+--test-args <val>
+pass arguments to the test runner
+.TP
--html-in-header <val>
file to add to <head>
.TP
--html-after-content <val>
file to add in <body>, after content
.TP
+--markdown-css <val>
+CSS files to include via <link> in a rendered Markdown file
+.TP
+--markdown-playground-url <val>
+URL to send code snippets to
+.TP
+--markdown-no-toc
+don't include table of contents
+.TP
-h, --help
Print help
+.TP
+-V, --version
+Print rustdoc's version
.SH "OUTPUT FORMATS"
--- /dev/null
+# aarch64-linux-android configuration
+# CROSS_PREFIX_aarch64-linux-android-
+CC_aarch64-linux-android=$(CFG_ANDROID_CROSS_PATH)/bin/aarch64-linux-android-gcc
+CXX_aarch64-linux-android=$(CFG_ANDROID_CROSS_PATH)/bin/aarch64-linux-android-g++
+CPP_aarch64-linux-android=$(CFG_ANDROID_CROSS_PATH)/bin/aarch64-linux-android-gcc -E
+AR_aarch64-linux-android=$(CFG_ANDROID_CROSS_PATH)/bin/aarch64-linux-android-ar
+CFG_LIB_NAME_aarch64-linux-android=lib$(1).so
+CFG_STATIC_LIB_NAME_aarch64-linux-android=lib$(1).a
+CFG_LIB_GLOB_aarch64-linux-android=lib$(1)-*.so
+CFG_LIB_DSYM_GLOB_aarch64-linux-android=lib$(1)-*.dylib.dSYM
+CFG_JEMALLOC_CFLAGS_aarch64-linux-android := -D__aarch64__ -DANDROID -D__ANDROID__ $(CFLAGS)
+CFG_GCCISH_CFLAGS_aarch64-linux-android := -Wall -g -fPIC -D__aarch64__ -DANDROID -D__ANDROID__ $(CFLAGS)
+CFG_GCCISH_CXXFLAGS_aarch64-linux-android := -fno-rtti $(CXXFLAGS)
+CFG_GCCISH_LINK_FLAGS_aarch64-linux-android := -shared -fPIC -ldl -g -lm -lsupc++
+CFG_GCCISH_DEF_FLAG_aarch64-linux-android := -Wl,--export-dynamic,--dynamic-list=
+CFG_GCCISH_PRE_LIB_FLAGS_aarch64-linux-android := -Wl,-whole-archive
+CFG_GCCISH_POST_LIB_FLAGS_aarch64-linux-android := -Wl,-no-whole-archive
+CFG_DEF_SUFFIX_aarch64-linux-android := .android.def
+CFG_LLC_FLAGS_aarch64-linux-android :=
+CFG_INSTALL_NAME_aarch64-linux-android =
+CFG_EXE_SUFFIX_aarch64-linux-android :=
+CFG_WINDOWSY_aarch64-linux-android :=
+CFG_UNIXY_aarch64-linux-android := 1
+CFG_PATH_MUNGE_aarch64-linux-android := true
+CFG_LDPATH_aarch64-linux-android :=
+CFG_RUN_aarch64-linux-android=
+CFG_RUN_TARG_aarch64-linux-android=
+RUSTC_FLAGS_aarch64-linux-android :=
+RUSTC_CROSS_FLAGS_aarch64-linux-android :=
+CFG_GNU_TRIPLE_aarch64-linux-android := aarch64-linux-android
CFG_STATIC_LIB_NAME_i686-pc-windows-gnu=$(1).lib
CFG_LIB_GLOB_i686-pc-windows-gnu=$(1)-*.dll
CFG_LIB_DSYM_GLOB_i686-pc-windows-gnu=$(1)-*.dylib.dSYM
-CFG_JEMALLOC_CFLAGS_i686-pc-windows-gnu := -march=i686 -m32 -D_WIN32_WINNT=0x0600 $(CFLAGS)
-CFG_GCCISH_CFLAGS_i686-pc-windows-gnu := -Wall -Werror -g -m32 -D_WIN32_WINNT=0x0600 $(CFLAGS)
+CFG_JEMALLOC_CFLAGS_i686-pc-windows-gnu := -march=i686 -m32 -D_WIN32_WINNT=0x0600 -D__USE_MINGW_ANSI_STDIO=1 $(CFLAGS)
+CFG_GCCISH_CFLAGS_i686-pc-windows-gnu := -Wall -Werror -g -m32 -D_WIN32_WINNT=0x0600 -D__USE_MINGW_ANSI_STDIO=1 $(CFLAGS)
CFG_GCCISH_CXXFLAGS_i686-pc-windows-gnu := -fno-rtti $(CXXFLAGS)
CFG_GCCISH_LINK_FLAGS_i686-pc-windows-gnu := -shared -g -m32
CFG_GCCISH_DEF_FLAG_i686-pc-windows-gnu :=
CFG_WINDOWSY_i686-pc-windows-gnu := 1
CFG_UNIXY_i686-pc-windows-gnu :=
CFG_PATH_MUNGE_i686-pc-windows-gnu :=
-CFG_LDPATH_i686-pc-windows-gnu :=$(CFG_LDPATH_i686-pc-windows-gnu):$(PATH)
-CFG_RUN_i686-pc-windows-gnu=PATH="$(CFG_LDPATH_i686-pc-windows-gnu):$(1)" $(2)
-CFG_RUN_TARG_i686-pc-windows-gnu=$(call CFG_RUN_i686-pc-windows-gnu,$(HLIB$(1)_H_$(CFG_BUILD)),$(2))
+CFG_LDPATH_i686-pc-windows-gnu :=
+CFG_RUN_i686-pc-windows-gnu=$(2)
+CFG_RUN_TARG_i686-pc-windows-gnu=$(call CFG_RUN_i686-pc-windows-gnu,,$(2))
CFG_GNU_TRIPLE_i686-pc-windows-gnu := i686-w64-mingw32
CFG_STATIC_LIB_NAME_x86_64-pc-windows-gnu=$(1).lib
CFG_LIB_GLOB_x86_64-pc-windows-gnu=$(1)-*.dll
CFG_LIB_DSYM_GLOB_x86_64-pc-windows-gnu=$(1)-*.dylib.dSYM
-CFG_JEMALLOC_CFLAGS_x86_64-pc-windows-gnu := -m64 -D_WIN32_WINNT=0x0600 $(CFLAGS)
-CFG_GCCISH_CFLAGS_x86_64-pc-windows-gnu := -Wall -Werror -g -m64 -D_WIN32_WINNT=0x0600 $(CFLAGS)
+CFG_JEMALLOC_CFLAGS_x86_64-pc-windows-gnu := -m64 -D_WIN32_WINNT=0x0600 -D__USE_MINGW_ANSI_STDIO=1 $(CFLAGS)
+CFG_GCCISH_CFLAGS_x86_64-pc-windows-gnu := -Wall -Werror -g -m64 -D_WIN32_WINNT=0x0600 -D__USE_MINGW_ANSI_STDIO=1 $(CFLAGS)
CFG_GCCISH_CXXFLAGS_x86_64-pc-windows-gnu := -fno-rtti $(CXXFLAGS)
CFG_GCCISH_LINK_FLAGS_x86_64-pc-windows-gnu := -shared -g -m64
CFG_GCCISH_DEF_FLAG_x86_64-pc-windows-gnu :=
CFG_WINDOWSY_x86_64-pc-windows-gnu := 1
CFG_UNIXY_x86_64-pc-windows-gnu :=
CFG_PATH_MUNGE_x86_64-pc-windows-gnu :=
-CFG_LDPATH_x86_64-pc-windows-gnu :=$(CFG_LDPATH_x86_64-pc-windows-gnu):$(PATH)
-CFG_RUN_x86_64-pc-windows-gnu=PATH="$(CFG_LDPATH_x86_64-pc-windows-gnu):$(1)" $(2)
-CFG_RUN_TARG_x86_64-pc-windows-gnu=$(call CFG_RUN_x86_64-pc-windows-gnu,$(HLIB$(1)_H_$(CFG_BUILD)),$(2))
+CFG_LDPATH_x86_64-pc-windows-gnu :=
+CFG_RUN_x86_64-pc-windows-gnu=$(2)
+CFG_RUN_TARG_x86_64-pc-windows-gnu=$(call CFG_RUN_x86_64-pc-windows-gnu,,$(2))
CFG_GNU_TRIPLE_x86_64-pc-windows-gnu := x86_64-w64-mingw32
JEMALLOC_ARGS_$(1) := --disable-tls
else ifeq ($(OSTYPE_$(1)), linux-androideabi)
JEMALLOC_ARGS_$(1) := --disable-tls
+else ifeq ($(OSTYPE_$(1)), linux-android)
+ JEMALLOC_ARGS_$(1) := --disable-tls
endif
################################################################################
TESTARGS += --ignored
endif
-TEST_BENCH =
# Arguments to the cfail/rfail/rpass/bench tests
ifdef CFG_VALGRIND
CTEST_RUNTOOL = --runtool "$(CFG_VALGRIND)"
- TEST_BENCH =
endif
ifdef PLEASE_BENCH
- TEST_BENCH = --bench
+ TESTARGS += --bench
endif
# Arguments to the perf tests
| grep '^$(S)src/libbacktrace' -v \
| grep '^$(S)src/rust-installer' -v \
| xargs $(CFG_PYTHON) $(S)src/etc/check-binaries.py
+ $(Q) $(CFG_PYTHON) $(S)src/etc/errorck.py $(S)src/
endif
// Write out a parseable log of tests that were run
pub logfile: Option<Path>,
- // Write out a json file containing any metrics of the run
- pub save_metrics: Option<Path>,
-
- // Write and ratchet a metrics file
- pub ratchet_metrics: Option<Path>,
-
- // Percent change in metrics to consider noise
- pub ratchet_noise_percent: Option<f64>,
-
- // "Shard" of the testsuite to pub run: this has the form of
- // two numbers (a,b), and causes only those tests with
- // positional order equal to a mod b to run.
- pub test_shard: Option<(uint,uint)>,
-
// A command line to prefix program execution with,
// for running under valgrind
pub runtool: Option<String>,
optopt("", "target-rustcflags", "flags to pass to rustc for target", "FLAGS"),
optflag("", "verbose", "run tests verbosely, showing all output"),
optopt("", "logfile", "file to log test execution to", "FILE"),
- optopt("", "save-metrics", "file to save metrics to", "FILE"),
- optopt("", "ratchet-metrics", "file to ratchet metrics against", "FILE"),
- optopt("", "ratchet-noise-percent",
- "percent change in metrics to consider noise", "N"),
optflag("", "jit", "run tests under the JIT"),
optopt("", "target", "the target to build for", "TARGET"),
optopt("", "host", "the host to build for", "HOST"),
optopt("", "adb-path", "path to the android debugger", "PATH"),
optopt("", "adb-test-dir", "path to tests for the android debugger", "PATH"),
optopt("", "lldb-python-dir", "directory containing LLDB's python module", "PATH"),
- optopt("", "test-shard", "run shard A, of B shards, worth of the testsuite", "A.B"),
optflag("h", "help", "show this message"));
assert!(!args.is_empty());
filter: filter,
cfail_regex: Regex::new(errors::EXPECTED_PATTERN).unwrap(),
logfile: matches.opt_str("logfile").map(|s| Path::new(s)),
- save_metrics: matches.opt_str("save-metrics").map(|s| Path::new(s)),
- ratchet_metrics:
- matches.opt_str("ratchet-metrics").map(|s| Path::new(s)),
- ratchet_noise_percent:
- matches.opt_str("ratchet-noise-percent")
- .and_then(|s| s.as_slice().parse::<f64>()),
runtool: matches.opt_str("runtool"),
host_rustcflags: matches.opt_str("host-rustcflags"),
target_rustcflags: matches.opt_str("target-rustcflags"),
opt_str2(matches.opt_str("adb-test-dir")).as_slice() &&
!opt_str2(matches.opt_str("adb-test-dir")).is_empty(),
lldb_python_dir: matches.opt_str("lldb-python-dir"),
- test_shard: test::opt_shard(matches.opt_str("test-shard")),
verbose: matches.opt_present("verbose"),
}
}
logv(c, format!("adb_test_dir: {:?}", config.adb_test_dir));
logv(c, format!("adb_device_status: {}",
config.adb_device_status));
- match config.test_shard {
- None => logv(c, "test_shard: (all)".to_string()),
- Some((a,b)) => logv(c, format!("test_shard: {}.{}", a, b))
- }
logv(c, format!("verbose: {}", config.verbose));
logv(c, format!("\n"));
}
logfile: config.logfile.clone(),
run_tests: true,
run_benchmarks: true,
- ratchet_metrics: config.ratchet_metrics.clone(),
- ratchet_noise_percent: config.ratchet_noise_percent.clone(),
- save_metrics: config.save_metrics.clone(),
- test_shard: config.test_shard.clone(),
nocapture: false,
color: test::AutoColor,
- show_boxplot: false,
- boxplot_width: 50,
- show_all_stats: false,
}
}
% The (old) Rust Crates and Modules Guide
-This content has moved into the
+This content has moved into
[the Rust Programming Language book](book/crates-and-modules.html).
% Error Handling in Rust
-This content has moved into the
+This content has moved into
[the Rust Programming Language book](book/error-handling.html).
% The (old) Rust Foreign Function Interface Guide
-This content has moved into the
+This content has moved into
[the Rust Programming Language book](book/ffi.html).
% The (old) Rust Macros Guide
-This content has moved into the
+This content has moved into
[the Rust Programming Language book](book/macros.html).
% The (old) Rust Ownership Guide
-This content has moved into the
+This content has moved into
[the Rust Programming Language book](book/ownership.html).
% The (old) Rust Compiler Plugins Guide
-This content has moved into the
+This content has moved into
[the Rust Programming Language book](book/plugins.html).
% The (old) Rust Pointer Guide
-This content has moved into the
+This content has moved into
[the Rust Programming Language book](book/pointers.html).
% The (old) Guide to Rust Strings
-This content has moved into the
+This content has moved into
[the Rust Programming Language book](book/strings.html).
% The (old) Rust Threads and Communication Guide
-This content has moved into the
+This content has moved into
[the Rust Programming Language book](book/tasks.html).
% The (old) Rust Testing Guide
-This content has moved into the
+This content has moved into
[the Rust Programming Language book](book/testing.html).
% Writing Safe Low-level and Unsafe Code in Rust
-This content has moved into the
+This content has moved into
[the Rust Programming Language book](book/unsafe.html).
% The (old) Rust Guide
-This content has moved into the
+This content has moved into
[the Rust Programming Language book](book/README.html).
| [Number literals](#number-literals)`*` | Example | Exponentiation | Suffixes |
|----------------------------------------|---------|----------------|----------|
-| Decimal integer | `98_222i` | `N/A` | Integer suffixes |
-| Hex integer | `0xffi` | `N/A` | Integer suffixes |
-| Octal integer | `0o77i` | `N/A` | Integer suffixes |
-| Binary integer | `0b1111_0000i` | `N/A` | Integer suffixes |
+| Decimal integer | `98_222is` | `N/A` | Integer suffixes |
+| Hex integer | `0xffis` | `N/A` | Integer suffixes |
+| Octal integer | `0o77is` | `N/A` | Integer suffixes |
+| Binary integer | `0b1111_0000is` | `N/A` | Integer suffixes |
| Floating-point | `123.0E+77f64` | `Optional` | Floating-point suffixes |
`*` All number literals allow `_` as a visual separator: `1_234.0E+18f64`
##### Suffixes
| Integer | Floating-point |
|---------|----------------|
-| `i` (`int`), `u` (`uint`), `u8`, `i8`, `u16`, `i16`, `u32`, `i32`, `u64`, `i64` | `f32`, `f64` |
+| `is` (`isize`), `us` (`usize`), `u8`, `i8`, `u16`, `i16`, `u32`, `i32`, `u64`, `i64` | `f32`, `f64` |
#### Character and string literals
without any spaces) by an _integer suffix_, which forcibly sets the
type of the literal. There are 10 valid values for an integer suffix:
-* The `i` and `u` suffixes give the literal type `int` or `uint`,
+* The `is` and `us` suffixes give the literal type `isize` or `usize`,
respectively.
* Each of the signed and unsigned machine types `u8`, `i8`,
`u16`, `i16`, `u32`, `i32`, `u64` and `i64`
Examples of integer literals of various forms:
```
-123i; // type int
-123u; // type uint
-123_u; // type uint
+123is; // type isize
+123us; // type usize
+123_us; // type usize
0xff_u8; // type u8
0o70_i16; // type i16
0b1111_1111_1001_0000_i32; // type i32
# struct HashMap<K, V>;
# fn f() {
# fn id<T>(t: T) -> T { t }
-type T = HashMap<int,String>; // Type arguments used in a type expression
-let x = id::<int>(10); // Type arguments used in a call expression
+type T = HashMap<i32,String>; // Type arguments used in a type expression
+let x = id::<i32>(10); // Type arguments used in a call expression
# }
```
| '*' ] ] ?
| '{' path_item [ ',' path_item ] * '}' ;
-path_item : ident | "mod" ;
+path_item : ident | "self" ;
```
A _use declaration_ creates one or more local name bindings synonymous with
* Binding all paths matching a given prefix, using the asterisk wildcard syntax
`use a::b::*;`
* Simultaneously binding a list of paths differing only in their final element
- and their immediate parent module, using the `mod` keyword, such as
- `use a::b::{mod, c, d};`
+ and their immediate parent module, using the `self` keyword, such as
+ `use a::b::{self, c, d};`
An example of `use` declarations:
```
use std::iter::range_step;
use std::option::Option::{Some, None};
-use std::collections::hash_map::{mod, HashMap};
+use std::collections::hash_map::{self, HashMap};
fn foo<T>(_: T){}
-fn bar(map1: HashMap<String, uint>, map2: hash_map::HashMap<String, uint>){}
+fn bar(map1: HashMap<String, usize>, map2: hash_map::HashMap<String, usize>){}
fn main() {
- // Equivalent to 'std::iter::range_step(0u, 10u, 2u);'
- range_step(0u, 10u, 2u);
+ // Equivalent to 'std::iter::range_step(0us, 10, 2);'
+ range_step(0us, 10, 2);
// Equivalent to 'foo(vec![std::option::Option::Some(1.0f64),
// std::option::Option::None]);'
An example of a function:
```
-fn add(x: int, y: int) -> int {
+fn add(x: i32, y: i32) -> i32 {
return x + y;
}
```
pattern that is valid in a let binding is also valid as an argument.
```
-fn first((value, _): (int, int)) -> int { value }
+fn first((value, _): (i32, i32)) -> i32 { value }
```
When a generic function is referenced, its type is instantiated based on the
context of the reference. For example, calling the `iter` function defined
-above on `[1, 2]` will instantiate type parameter `T` with `int`, and require
-the closure parameter to have type `fn(int)`.
+above on `[1, 2]` will instantiate type parameter `T` with `isize`, and require
+the closure parameter to have type `fn(isize)`.
The type parameters can also be explicitly supplied in a trailing
[path](#paths) component after the function name. This might be necessary if
```
# fn my_err(s: &str) -> ! { panic!() }
-fn f(i: int) -> int {
+fn f(i: i32) -> i32 {
if i == 42 {
return 42;
}
```
This will not compile without the `!` annotation on `my_err`, since the `else`
-branch of the conditional in `f` does not return an `int`, as required by the
+branch of the conditional in `f` does not return an `i32`, as required by the
signature of `f`. Adding the `!` annotation to `my_err` informs the
typechecker that, should control ever enter `my_err`, no further type judgments
about `f` need to hold, since control will never resume in any context that
```
// Declares an extern fn, the ABI defaults to "C"
-extern fn new_int() -> int { 0 }
+extern fn new_i32() -> i32 { 0 }
// Declares an extern fn with "stdcall" ABI
-extern "stdcall" fn new_int_stdcall() -> int { 0 }
+extern "stdcall" fn new_i32_stdcall() -> i32 { 0 }
```
Unlike normal functions, extern fns have an `extern "ABI" fn()`. This is the
same type as the functions declared in an extern block.
```
-# extern fn new_int() -> int { 0 }
-let fptr: extern "C" fn() -> int = new_int;
+# extern fn new_i32() -> i32 { 0 }
+let fptr: extern "C" fn() -> i32 = new_i32;
```
Extern functions may be called directly from Rust code as Rust uses large,
An example of a `struct` item and its use:
```
-struct Point {x: int, y: int}
+struct Point {x: i32, y: i32}
let p = Point {x: 10, y: 11};
-let px: int = p.x;
+let px: i32 = p.x;
```
A _tuple structure_ is a nominal [tuple type](#tuple-types), also defined with
the keyword `struct`. For example:
```
-struct Point(int, int);
+struct Point(i32, i32);
let p = Point(10, 11);
-let px: int = match p { Point(x, _) => x };
+let px: i32 = match p { Point(x, _) => x };
```
A _unit-like struct_ is a structure without any fields, defined by leaving off
the `static` lifetime, fixed-size arrays, tuples, enum variants, and structs.
```
-const BIT1: uint = 1 << 0;
-const BIT2: uint = 1 << 1;
+const BIT1: u32 = 1 << 0;
+const BIT2: u32 = 1 << 1;
-const BITS: [uint; 2] = [BIT1, BIT2];
+const BITS: [u32; 2] = [BIT1, BIT2];
const STRING: &'static str = "bitstring";
struct BitsNStrings<'a> {
- mybits: [uint; 2],
+ mybits: [u32; 2],
mystring: &'a str
}
data are being stored, or single-address and mutability properties are required.
```
-use std::sync::atomic::{AtomicUint, Ordering, ATOMIC_UINT_INIT};;
+use std::sync::atomic::{AtomicUsize, Ordering, ATOMIC_USIZE_INIT};
-// Note that ATOMIC_UINT_INIT is a *const*, but it may be used to initialize a
+// Note that ATOMIC_USIZE_INIT is a *const*, but it may be used to initialize a
// static. This static can be modified, so it is not placed in read-only memory.
-static COUNTER: AtomicUint = ATOMIC_UINT_INIT;
+static COUNTER: AtomicUsize = ATOMIC_USIZE_INIT;
// This table is a candidate to be placed in read-only memory.
-static TABLE: &'static [uint] = &[1, 2, 3, /* ... */];
+static TABLE: &'static [usize] = &[1, 2, 3, /* ... */];
for slot in TABLE.iter() {
println!("{}", slot);
libraries and can also be bound from C libraries (in an `extern` block).
```
-# fn atomic_add(_: &mut uint, _: uint) -> uint { 2 }
+# fn atomic_add(_: &mut u32, _: u32) -> u32 { 2 }
-static mut LEVELS: uint = 0;
+static mut LEVELS: u32 = 0;
// This violates the idea of no shared state, and this doesn't internally
// protect against races, so this function is `unsafe`
-unsafe fn bump_levels_unsafe1() -> uint {
+unsafe fn bump_levels_unsafe1() -> u32 {
let ret = LEVELS;
LEVELS += 1;
return ret;
// Assuming that we have an atomic_add function which returns the old value,
// this function is "safe" but the meaning of the return value may not be what
// callers expect, so it's still marked as `unsafe`
-unsafe fn bump_levels_unsafe2() -> uint {
+unsafe fn bump_levels_unsafe2() -> u32 {
return atomic_add(&mut LEVELS, 1);
}
```
[implementations](#implementations).
```
-# type Surface = int;
-# type BoundingBox = int;
+# type Surface = i32;
+# type BoundingBox = i32;
trait Shape {
fn draw(&self, Surface);
fn bounding_box(&self) -> BoundingBox;
```
trait Seq<T> {
- fn len(&self) -> uint;
- fn elt_at(&self, n: uint) -> T;
+ fn len(&self) -> u32;
+ fn elt_at(&self, n: u32) -> T;
fn iter<F>(&self, F) where F: Fn(T);
}
```
called on values that have the parameter's type. For example:
```
-# type Surface = int;
+# type Surface = i32;
# trait Shape { fn draw(&self, Surface); }
fn draw_twice<T: Shape>(surface: Surface, sh: T) {
sh.draw(surface);
```
# trait Shape { }
-# impl Shape for int { }
-# let mycircle = 0i;
+# impl Shape for i32 { }
+# let mycircle = 0i32;
let myshape: Box<Shape> = Box::new(mycircle) as Box<Shape>;
```
```
trait Num {
- fn from_int(n: int) -> Self;
+ fn from_i32(n: i32) -> Self;
}
impl Num for f64 {
- fn from_int(n: int) -> f64 { n as f64 }
+ fn from_i32(n: i32) -> f64 { n as f64 }
}
-let x: f64 = Num::from_int(42);
+let x: f64 = Num::from_i32(42);
```
Traits may inherit from other traits. For example, in
```{.ignore}
# trait Shape { fn area(&self) -> f64; }
# trait Circle : Shape { fn radius(&self) -> f64; }
-# impl Shape for int { fn area(&self) -> f64 { 0.0 } }
-# impl Circle for int { fn radius(&self) -> f64 { 0.0 } }
-# let mycircle = 0;
+# impl Shape for i32 { fn area(&self) -> f64 { 0.0 } }
+# impl Circle for i32 { fn radius(&self) -> f64 { 0.0 } }
+# let mycircle = 0i32;
let mycircle = Box::new(mycircle) as Box<Circle>;
let nonsense = mycircle.radius() * mycircle.area();
```
```
# struct Point {x: f64, y: f64};
# impl Copy for Point {}
-# type Surface = int;
+# type Surface = i32;
# struct BoundingBox {x: f64, y: f64, width: f64, height: f64};
# trait Shape { fn draw(&self, Surface); fn bounding_box(&self) -> BoundingBox; }
# fn do_draw_circle(s: Surface, c: Circle) { }
in the same module or a sub-module as the `self` type:
```
-struct Point {x: int, y: int}
+struct Point {x: i32, y: i32}
impl Point {
fn log(&self) {
// Declare a public struct with a private field
pub struct Bar {
- field: int
+ field: i32
}
// Declare a public enum with two public variants
mod m1 {
// Missing documentation is ignored here
#[allow(missing_docs)]
- pub fn undocumented_one() -> int { 1 }
+ pub fn undocumented_one() -> i32 { 1 }
// Missing documentation signals a warning here
#[warn(missing_docs)]
- pub fn undocumented_too() -> int { 2 }
+ pub fn undocumented_too() -> i32 { 2 }
// Missing documentation signals an error here
#[deny(missing_docs)]
- pub fn undocumented_end() -> int { 3 }
+ pub fn undocumented_end() -> i32 { 3 }
}
```
#[allow(missing_docs)]
mod nested {
// Missing documentation is ignored here
- pub fn undocumented_one() -> int { 1 }
+ pub fn undocumented_one() -> i32 { 1 }
// Missing documentation signals a warning here,
// despite the allow above.
#[warn(missing_docs)]
- pub fn undocumented_two() -> int { 2 }
+ pub fn undocumented_two() -> i32 { 2 }
}
// Missing documentation signals a warning here
- pub fn undocumented_too() -> int { 3 }
+ pub fn undocumented_too() -> i32 { 3 }
}
```
// Attempting to toggle warning signals an error here
#[allow(missing_docs)]
/// Returns 2.
- pub fn undocumented_too() -> int { 2 }
+ pub fn undocumented_too() -> i32 { 2 }
}
```
: ___Needs filling in___
* `no_copy_bound`
: This type does not implement "copy", even if eligible.
-* `no_send_bound`
- : This type does not implement "send", even if eligible.
-* `no_sync_bound`
- : This type does not implement "sync", even if eligible.
* `eh_personality`
: ___Needs filling in___
* `exchange_free`
* `#[inline(always)]` asks the compiler to always perform an inline expansion.
* `#[inline(never)]` asks the compiler to never perform an inline expansion.
-### Derive
+### `derive`
The `derive` attribute allows certain traits to be automatically implemented
for data structures. For example, the following will create an `impl` for the
```
#[derive(PartialEq, Clone)]
struct Foo<T> {
- a: int,
+ a: i32,
b: T
}
```
The generated `impl` for `PartialEq` is equivalent to
```
-# struct Foo<T> { a: int, b: T }
+# struct Foo<T> { a: i32, b: T }
impl<T: PartialEq> PartialEq for Foo<T> {
fn eq(&self, other: &Foo<T>) -> bool {
self.a == other.a && self.b == other.b
```{.tuple}
(0,);
(0.0, 4.5);
-("a", 4u, true);
+("a", 4us, true);
```
### Unit expressions
```
# struct Point { x: f64, y: f64 }
# struct TuplePoint(f64, f64);
-# mod game { pub struct User<'a> { pub name: &'a str, pub age: uint, pub score: uint } }
+# mod game { pub struct User<'a> { pub name: &'a str, pub age: u32, pub score: uint } }
# struct Cookie; fn some_fn<T>(t: T) {}
Point {x: 10.0, y: 20.0};
TuplePoint(10.0, 20.0);
fields.
```
-# struct Point3d { x: int, y: int, z: int }
+# struct Point3d { x: i32, y: i32, z: i32 }
let base = Point3d {x: 1, y: 2, z: 3};
Point3d {y: 0, z: 10, .. base};
```
[literal](#literals) or a [static item](#static-items).
```
-[1i, 2, 3, 4];
+[1is, 2, 3, 4];
["a", "b", "c", "d"];
-[0i; 128]; // array with 128 zeros
+[0is; 128]; // array with 128 zeros
[0u8, 0u8, 0u8, 0u8];
```
```
# fn sum(v: &[f64]) -> f64 { 0.0 }
-# fn len(v: &[f64]) -> int { 0 }
+# fn len(v: &[f64]) -> i32 { 0 }
fn avg(v: &[f64]) -> f64 {
let sum: f64 = sum(v);
operand.
```
-# let mut x = 0i;
+# let mut x = 0is;
# let y = 0;
x = y;
An example of a parenthesized expression:
```
-let x: int = (2 + 3) * 4;
+let x: i32 = (2 + 3) * 4;
```
Some examples of call expressions:
```
-# fn add(x: int, y: int) -> int { 0 }
+# fn add(x: i32, y: i32) -> i32 { 0 }
-let x: int = add(1, 2);
+let x: i32 = add(1i32, 2i32);
let pi: Option<f32> = "3.14".parse();
```
function argument, and call it with a lambda expression as an argument:
```
-fn ten_times<F>(f: F) where F: Fn(int) {
- let mut i = 0;
+fn ten_times<F>(f: F) where F: Fn(i32) {
+ let mut i = 0i32;
while i < 10 {
f(i);
i += 1;
An example:
```
-let mut i = 0u;
+let mut i = 0us;
while i < 10 {
println!("hello");
An example of a for loop over the contents of an array:
```
-# type Foo = int;
+# type Foo = i32;
# fn bar(f: Foo) { }
# let a = 0;
# let b = 0;
An example of a for loop over a series of integers:
```
-# fn bar(b:uint) { }
-for i in range(0u, 256) {
+# fn bar(b:usize) { }
+for i in range(0us, 256) {
bar(i);
}
```
enum List<X> { Nil, Cons(X, Box<List<X>>) }
fn main() {
- let x: List<int> = List::Cons(10, box List::Cons(11, box List::Nil));
+ let x: List<i32> = List::Cons(10, box List::Cons(11, box List::Nil));
match x {
List::Cons(_, box List::Nil) => panic!("singleton list"),
`advanced_slice_patterns` feature gate is turned on. This wildcard can be used
at most once for a given array, which implies that it cannot be used to
specifically match elements that are at an unknown distance from both ends of a
-array, like `[.., 42, ..]`. If followed by a variable name, it will bind the
+array, like `[.., 42, ..]`. If preceded by a variable name, it will bind the
corresponding slice to the variable. Example:
```
# #![feature(advanced_slice_patterns)]
-fn is_symmetric(list: &[uint]) -> bool {
+fn is_symmetric(list: &[u32]) -> bool {
match list {
[] | [_] => true,
[x, inside.., y] if x == y => is_symmetric(inside),
```
#![feature(box_syntax)]
-# fn process_pair(a: int, b: int) { }
+# fn process_pair(a: i32, b: i32) { }
# fn process_ten() { }
enum List<X> { Nil, Cons(X, Box<List<X>>) }
fn main() {
- let x: List<int> = List::Cons(10, box List::Cons(11, box List::Nil));
+ let x: List<i32> = List::Cons(10, box List::Cons(11, box List::Nil));
match x {
List::Cons(a, box List::Cons(b, _)) => {
```
Patterns can also dereference pointers by using the `&`, `&mut` and `box`
-symbols, as appropriate. For example, these two matches on `x: &int` are
+symbols, as appropriate. For example, these two matches on `x: &isize` are
equivalent:
```
-# let x = &3i;
+# let x = &3is;
let y = match *x { 0 => "zero", _ => "some" };
let z = match x { &0 => "zero", _ => "some" };
may be specified with `...`. For example:
```
-# let x = 2i;
+# let x = 2is;
let message = match x {
0 | 1 => "not many",
```
# let maybe_digit = Some(0);
-# fn process_digit(i: int) { }
-# fn process_other(i: int) { }
+# fn process_digit(i: i32) { }
+# fn process_other(i: i32) { }
let message = match maybe_digit {
Some(x) if x < 10 => process_digit(x),
An example of a `return` expression:
```
-fn max(a: int, b: int) -> int {
+fn max(a: i32, b: i32) -> i32 {
if a > b {
return a;
}
#### Machine-dependent integer types
-The `uint` type is an unsigned integer type with the same number of bits as the
+The `usize` type is an unsigned integer type with the same number of bits as the
platform's pointer type. It can represent every memory address in the process.
-The `int` type is a signed integer type with the same number of bits as the
+The `isize` type is a signed integer type with the same number of bits as the
platform's pointer type. The theoretical upper bound on object and array size
-is the maximum `int` value. This ensures that `int` can be used to calculate
+is the maximum `isize` value. This ensures that `isize` can be used to calculate
differences between pointers into an object or array and can address every byte
within an object along with one byte past the end.
An example of a tuple type and its use:
```
-type Pair<'a> = (int, &'a str);
+type Pair<'a> = (i32, &'a str);
let p: Pair<'static> = (10, "hello");
let (a, b) = p;
assert!(b != "world");
An example of a `fn` type:
```
-fn add(x: int, y: int) -> int {
+fn add(x: i32, y: i32) -> i32 {
return x + y;
}
let mut x = add(5,7);
-type Binop = fn(int, int) -> int;
+type Binop = fn(i32, i32) -> i32;
let bo: Binop = add;
x = bo(5,7);
```
An example of creating and calling a closure:
```rust
-let captured_var = 10i;
+let captured_var = 10is;
let closure_no_args = |&:| println!("captured_var={}", captured_var);
-let closure_args = |&: arg: int| -> int {
+let closure_args = |&: arg: isize| -> isize {
println!("captured_var={}, arg={}", captured_var, arg);
arg // Note lack of semicolon after 'arg'
};
-fn call_closure<F: Fn(), G: Fn(int) -> int>(c1: F, c2: G) {
+fn call_closure<F: Fn(), G: Fn(isize) -> isize>(c1: F, c2: G) {
c1();
c2(2);
}
fn stringify(&self) -> String;
}
-impl Printable for int {
+impl Printable for isize {
fn stringify(&self) -> String { self.to_string() }
}
}
fn main() {
- print(Box::new(10i) as Box<Printable>);
+ print(Box::new(10is) as Box<Printable>);
}
```
Function parameters are immutable unless declared with `mut`. The `mut` keyword
applies only to the following parameter (so `|mut x, y|` and `fn f(mut x:
-Box<int>, y: Box<int>)` declare one mutable variable `x` and one immutable
+Box<i32>, y: Box<i32>)` declare one mutable variable `x` and one immutable
variable `y`).
Methods that take either `self` or `Box<Self>` can optionally place them in a
An example of a box type and value:
```
-let x: Box<int> = Box::new(10);
+let x: Box<i32> = Box::new(10);
```
Box values exist in 1:1 correspondence with their heap allocation, copying a
the source location cannot be used unless it is reinitialized.
```
-let x: Box<int> = Box::new(10);
+let x: Box<i32> = Box::new(10);
let y = x;
// attempting to use `x` will result in an error here
```
Okay, let's talk about the actual code in the example. `cmp` is a function that
compares two things, and returns an `Ordering`. We return either
`Ordering::Less`, `Ordering::Greater`, or `Ordering::Equal`, depending on if
-the two values are greater, less, or equal. Note that each variant of the
+the two values are less, greater, or equal. Note that each variant of the
`enum` is namespaced under the `enum` itself: it's `Ordering::Greater` not
`Greater`.
these sub-modules don't have their own sub-modules, we've chosen to make them
`src/english/greetings.rs` and `src/japanese/farewells.rs`. Whew!
-Right now, the contents of `src/english/greetings.rs` and
-`src/japanese/farewells.rs` are both empty at the moment. Let's add some
-functions.
+The contents of `src/english/greetings.rs` and `src/japanese/farewells.rs` are
+both empty at the moment. Let's add some functions.
Put this in `src/english/greetings.rs`:
(This is "Sayōnara", if you're curious.)
-Now that we have our some functionality in our crate, let's try to use it from
+Now that we have some functionality in our crate, let's try to use it from
another crate.
# Importing External Crates
can be any letter you'd like. We could define `Result<T, E>` as:
```{rust}
-enum Result<H, N> {
- Ok(H),
- Err(N),
+enum Result<A, Z> {
+ Ok(A),
+ Err(Z),
}
```
-% `if`
+% If
Rust's take on `if` is not particularly complex, but it's much more like the
`if` you'll find in a dynamically typed language than in a more traditional
that could be invoked like: `my_macro!(i->(( 2+2 )))`.
To avoid ambiguity, macro invocation syntax must conform to the following rules:
+
* `expr` must be followed by `=>`, `,` or `;`.
* `ty` and `path` must be followed by `=>`, `,`, `:`, `=`, `>` or `as`.
* `pat` must be followed by `=>`, `,` or `=`.
let x: &'static i32 = &FOO;
```
-This adds an `i32` to the data segment of the binary, and `FOO` is a reference
+This adds an `i32` to the data segment of the binary, and `x` is a reference
to it.
# Shared Ownership
// Copy the object out from the pointer onto the stack,
// where it is covered by normal Rust destructor semantics
// and cleans itself up, if necessary
- ptr::read(self.ptr as *const T);
+ ptr::read(self.ptr);
// clean-up our allocation
free(self.ptr as *mut c_void)
`deref`, and `add` respectively.
- stack unwinding and general failure; the `eh_personality`, `fail`
and `fail_bounds_checks` lang items.
-- the traits in `std::markers` used to indicate types of
+- the traits in `std::marker` used to indicate types of
various kinds; lang items `send`, `sync` and `copy`.
- the marker types and variance indicators found in
- `std::markers`; lang items `covariant_type`,
- `contravariant_lifetime`, `no_sync_bound`, etc.
+ `std::marker`; lang items `covariant_type`,
+ `contravariant_lifetime`, etc.
Lang items are loaded lazily by the compiler; e.g. if one never uses
`Box` then there is no need to define functions for `exchange_malloc`
+++ /dev/null
-#!/usr/bin/perl -w
-# Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-# file at the top-level directory of this distribution and at
-# http://rust-lang.org/COPYRIGHT.
-#
-# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-# option. This file may not be copied, modified, or distributed
-# except according to those terms.
-
-my $file = $ARGV[0];
-
-my @lines = <>;
-
-my $anchors = {};
-
-my $i = 0;
-for $line (@lines) {
- $i++;
- if ($line =~ m/id="([^"]+)"/) {
- $anchors->{$1} = $i;
- }
-}
-
-$i = 0;
-for $line (@lines) {
- $i++;
- while ($line =~ m/href="#([^"]+)"/g) {
- if (! exists($anchors->{$1})) {
- print "$file:$i: $1 referenced\n";
- }
- }
-}
:type 'integer
:group 'rust-mode)
+(defcustom rust-indent-method-chain nil
+ "Indent Rust method chains, aligned by the '.' operators"
+ :type 'boolean
+ :group 'rust-mode)
+
(defun rust-paren-level () (nth 0 (syntax-ppss)))
(defun rust-in-str-or-cmnt () (nth 8 (syntax-ppss)))
(defun rust-rewind-past-str-cmnt () (goto-char (nth 8 (syntax-ppss))))
;; open bracket ends the line
(when (not (looking-at "[[:blank:]]*\\(?://.*\\)?$"))
(when (looking-at "[[:space:]]")
- (forward-word 1)
- (backward-word 1))
+ (forward-word 1)
+ (backward-word 1))
(current-column))))
+(defun rust-align-to-method-chain ()
+ (save-excursion
+ (previous-line)
+ (end-of-line)
+ (backward-word 1)
+ (backward-char)
+ (when (looking-at "\\..+\(.*\)\n")
+ (- (current-column) rust-indent-offset))))
+
(defun rust-rewind-to-beginning-of-current-level-expr ()
(let ((current-level (rust-paren-level)))
(back-to-indentation)
;; the inside of it correctly relative to the outside.
(if (= 0 level)
0
+ (or
+ (when rust-indent-method-chain
+ (rust-align-to-method-chain))
(save-excursion
(backward-up-list)
(rust-rewind-to-beginning-of-current-level-expr)
- (+ (current-column) rust-indent-offset)))))
+ (+ (current-column) rust-indent-offset))))))
(cond
;; A function return type is indented to the corresponding function arguments
((looking-at "->")
;; A closing brace is 1 level unindended
((looking-at "}") (- baseline rust-indent-offset))
+ ;; Line up method chains by their .'s
+ ((when (and rust-indent-method-chain
+ (looking-at "\..+\(.*\);?\n"))
+ (or
+ (let ((method-indent (rust-align-to-method-chain)))
+ (when method-indent
+ (+ method-indent rust-indent-offset)))
+ (+ baseline rust-indent-offset))))
+
+
;; Doc comments in /** style with leading * indent to line up the *s
((and (nth 4 (syntax-ppss)) (looking-at "*"))
(+ 1 baseline))
--- /dev/null
+# Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+# file at the top-level directory of this distribution and at
+# http://rust-lang.org/COPYRIGHT.
+#
+# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+# option. This file may not be copied, modified, or distributed
+# except according to those terms.
+
+# Digs error codes out of files named 'diagnostics.rs' across
+# the tree, and ensures there are no duplicates.
+
+import sys, os, re
+
+src_dir = sys.argv[1]
+
+errcode_map = { }
+
+for (dirpath, dirnames, filenames) in os.walk(src_dir):
+
+ if "src/test" in dirpath or "src/llvm" in dirpath:
+ # Short circuit for speed
+ continue
+
+ for filename in filenames:
+ if filename != "diagnostics.rs":
+ continue
+
+ path = os.path.join(dirpath, filename)
+ line_num = 1
+ with open(path, 'r') as f:
+ for line in f:
+
+ p = re.compile("(E\d\d\d\d)")
+ m = p.search(line)
+ if not m is None:
+ errcode = m.group(1)
+
+ new_record = [(errcode, path, line_num, line)]
+ existing = errcode_map.get(errcode)
+ if existing is not None:
+ # This is a dupe
+ errcode_map[errcode] = existing + new_record
+ else:
+ errcode_map[errcode] = new_record
+
+ line_num += 1
+
+errors = False
+all_errors = []
+for errcode in errcode_map:
+ entries = errcode_map[errcode]
+ all_errors += [entries[0][0]]
+ if len(entries) > 1:
+ print "error: duplicate error code " + errcode
+ for entry in entries:
+ print entry[1] + ": " + str(entry[2])
+ print entry[3]
+ errors = True
+
+print str(len(errcode_map)) + " error codes"
+
+all_errors.sort()
+all_errors.reverse()
+
+print "highest error code: " + all_errors[0]
+
+if errors:
+ sys.exit(1)
"""
This script creates a pile of compile-fail tests check that all the
-derivings have spans that point to the fields, rather than the
-#[deriving(...)] line.
+derives have spans that point to the fields, rather than the
+#[derive(...)] line.
sample usage: src/etc/generate-deriving-span-tests.py
"""
"""
ENUM_STRING = """
-#[deriving({traits})]
+#[derive({traits})]
enum Enum {{
A(
Error {errors}
}}
"""
ENUM_STRUCT_VARIANT_STRING = """
-#[deriving({traits})]
+#[derive({traits})]
enum Enum {{
A {{
x: Error {errors}
}}
"""
STRUCT_STRING = """
-#[deriving({traits})]
+#[derive({traits})]
struct Struct {{
x: Error {errors}
}}
"""
STRUCT_TUPLE_STRING = """
-#[deriving({traits})]
+#[derive({traits})]
struct Struct(
Error {errors}
);
string = [ENUM_STRING, ENUM_STRUCT_VARIANT_STRING, STRUCT_STRING, STRUCT_TUPLE_STRING][type]
all_traits = ','.join([trait] + super_traits)
super_traits = ','.join(super_traits)
- error_deriving = '#[deriving(%s)]' % super_traits if super_traits else ''
+ error_deriving = '#[derive(%s)]' % super_traits if super_traits else ''
errors = '\n'.join('//~%s ERROR' % ('^' * n) for n in range(error_count))
code = string.format(traits = all_traits, errors = errors)
return TEMPLATE.format(year = YEAR, error_deriving=error_deriving, code = code)
def write_file(name, string):
- test_file = os.path.join(TEST_DIR, 'deriving-span-%s.rs' % name)
+ test_file = os.path.join(TEST_DIR, 'derives-span-%s.rs' % name)
# set write permission if file exists, so it can be changed
if os.path.exists(test_file):
-#!/usr/bin/perl
-use strict;
-use warnings;
+#!/usr/bin/env python
+import re
+import sys
-my $indent = 0;
-while (<>) {
- if (/^rust: ~">>/) {
- $indent += 1;
- }
+indent = 0
+more_re = re.compile(r"^rust: ~\">>")
+less_re = re.compile(r"^rust: ~\"<<")
+while True:
+ line = sys.stdin.readline()
+ if not line:
+ break
- printf "%03d %s%s", $indent, (" " x $indent), $_;
+ if more_re.match(line):
+ indent += 1
- if (/^rust: ~"<</) {
- $indent -= 1;
- }
-}
+ print "%03d %s%s" % (indent, " " * indent, line.strip())
+
+ if less_re.match(line):
+ indent -= 1
<DetectChar char="=" attribute="Normal Text" context="#pop"/>
<DetectChar char="<" attribute="Normal Text" context="#pop"/>
</context>
- <context attribute="String" lineEndContext="#stay" name="String">
+ <context attribute="String" lineEndContext="#pop" name="String">
<LineContinue attribute="String" context="#stay"/>
<DetectChar char="\" attribute="CharEscape" context="CharEscape"/>
<DetectChar attribute="String" context="#pop" char="""/>
--- /dev/null
+# Nano configuration for Rust
+# Copyright 2015 The Rust Project Developers.
+#
+# NOTE: Rules are applied in order: later rules re-colorize matching text.
+syntax "rust" "\.rs"
+
+# function definition
+color magenta "fn [a-z0-9_]+"
+
+# Reserved words
+color yellow "\<(abstract|alignof|as|be|box|break|const|continue|crate|do|else|enum|extern|false|final|fn|for|if|impl|in|let|loop|macro|match|mod|move|mut|offsetof|override|priv|pub|pure|ref|return|sizeof|static|self|struct|super|true|trait|type|typeof|unsafe|unsized|use|virtual|where|while|yield)\>"
+
+# macros
+color red "[a-z_]+!"
+
+# Constants
+color magenta "[A-Z][A-Z_]+"
+
+# Traits/Enums/Structs/Types/etc.
+color magenta "[A-Z][a-z]+"
+
+# Strings
+color green "\".*\""
+color green start="\".*\\$" end=".*\""
+# NOTE: This isn't accurate but matching "#{0,} for the end of the string is too liberal
+color green start="r#+\"" end="\"#+"
+
+# Comments
+color blue "//.*"
+
+# Attributes
+color magenta start="#!\[" end="\]"
+
+# Some common markers
+color brightcyan "(XXX|TODO|FIXME|\?\?\?)"
|| mktemp -d -t 'rustup-tmp-install' 2>/dev/null \
|| create_tmp_dir)
-# If we're saving nightlies and we didn't specify which one, grab todays.
-# Otherwise we'll use the latest version.
+# If we're saving nightlies and we didn't specify which one, grab the latest
+# version from the perspective of the server. Buildbot has typically finished
+# building and uploading by ~8UTC, but we want to include a little buffer.
+#
+# FIXME It would be better to use the known most recent nightly that has been
+# built. This is waiting on a change to have buildbot publish metadata that
+# can be queried.
if [ -n "${CFG_SAVE}" -a -z "${CFG_DATE}" ];
then
- CFG_DATE=`date "+%Y-%m-%d"`
+ CFG_DATE=`TZ=Etc/UTC+9 date "+%Y-%m-%d"`
fi
RUST_URL="https://static.rust-lang.org/dist"
RUST_URL="${RUST_URL}/${CFG_DATE}"
fi
-verify_hash() {
- remote_sha256="$1"
- local_file="$2"
-
+download_hash() {
msg "Downloading ${remote_sha256}"
remote_sha256=`"${CFG_CURL}" -f "${remote_sha256}"`
+ if [ -n "${CFG_SAVE}" ]; then
+ echo "${remote_sha256}" > "${local_sha_file}"
+ fi
if [ "$?" -ne 0 ]; then
rm -Rf "${CFG_TMP_DIR}"
err "Failed to download ${remote_url}"
fi
+}
+
+verify_hash() {
+ remote_sha256="$1"
+ local_file="$2"
+ local_sha_file="${local_file}.sha256"
+
+ if [ -n "${CFG_SAVE}" ]; then
+ if [ -f "${local_sha_file}" ]; then
+ msg "Local ${local_sha_file} exists, treating as remote hash"
+ remote_sha256=`cat "${local_sha_file}"`
+ else
+ download_hash
+ fi
+ else
+ download_hash
+ fi
msg "Verifying hash"
local_sha256=$(calculate_hash "${local_file}")
# option. This file may not be copied, modified, or distributed
# except according to those terms.
-import sys, fileinput, subprocess, re
+import sys, fileinput, subprocess, re, os
from licenseck import *
import snapshot
for line in fileinput.input(file_names,
openhook=fileinput.hook_encoded("utf-8")):
- if fileinput.filename().find("tidy.py") == -1:
- if line.find(cr_flag) != -1:
+ if "tidy.py" not in fileinput.filename():
+ if cr_flag in line:
check_cr = False
- if line.find(tab_flag) != -1:
+ if tab_flag in line:
check_tab = False
- if line.find(linelength_flag) != -1:
+ if linelength_flag in line:
check_linelength = False
- if line.find("TODO") != -1:
+ if "TODO" in line:
report_err("TODO is deprecated; use FIXME")
match = re.match(r'^.*/(\*|/!?)\s*XXX', line)
if match:
report_err("XXX is no longer necessary, use FIXME")
match = re.match(r'^.*//\s*(NOTE.*)$', line)
- if match:
+ if match and "TRAVIS" not in os.environ:
m = match.group(1)
if "snap" in m.lower():
report_warn(match.group(1))
if "SNAP" in line:
report_warn("unmatched SNAP line: " + line)
- if check_tab and (line.find('\t') != -1 and
- fileinput.filename().find("Makefile") == -1):
+ if check_tab and ('\t' in line and
+ "Makefile" not in fileinput.filename()):
report_err("tab character")
- if check_cr and not autocrlf and line.find('\r') != -1:
+ if check_cr and not autocrlf and '\r' in line:
report_err("CR character")
if line.endswith(" \n") or line.endswith("\t\n"):
report_err("trailing whitespace")
use core::slice;
#[allow(non_camel_case_types)]
- #[deriving(Clone)]
+ #[derive(Clone)]
pub enum GraphemeCat {
""")
for cat in grapheme_cats + ["Any"]:
use core::default::Default;
use core::fmt;
use core::hash::{self, Hash};
+use core::iter::Iterator;
use core::marker::Sized;
use core::mem;
use core::option::Option;
fn deref_mut(&mut self) -> &mut T { &mut **self }
}
+// FIXME(#21363) remove `old_impl_check` when bug is fixed
+#[old_impl_check]
+impl<'a, T> Iterator for Box<Iterator<Item=T> + 'a> {
+ type Item = T;
+
+ fn next(&mut self) -> Option<T> {
+ (**self).next()
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ (**self).size_hint()
+ }
+}
+
#[cfg(test)]
mod test {
#[test]
libc::realloc(ptr as *mut libc::c_void, size as libc::size_t) as *mut u8
} else {
let new_ptr = allocate(size, align);
- ptr::copy_memory(new_ptr, ptr as *const u8, cmp::min(size, old_size));
+ ptr::copy_memory(new_ptr, ptr, cmp::min(size, old_size));
deallocate(ptr, old_size, align);
new_ptr
}
#![feature(lang_items, unsafe_destructor)]
#![feature(box_syntax)]
#![feature(optin_builtin_traits)]
+// FIXME(#21363) remove `old_impl_check` when bug is fixed
+#![feature(old_impl_check)]
#![allow(unknown_features)] #![feature(int_uint)]
#[macro_use]
use core::ptr::{self, PtrExt};
use core::result::Result;
use core::result::Result::{Ok, Err};
+use core::intrinsics::assume;
use heap::deallocate;
/// See the [module level documentation](../index.html) for more details.
#[unsafe_no_drop_flag]
#[stable]
-#[cfg(stage0)] // NOTE remove impl after next snapshot
pub struct Rc<T> {
// FIXME #12808: strange names to try to avoid interfering with field accesses of the contained
// type via Deref
_ptr: NonZero<*mut RcBox<T>>,
- _nosend: marker::NoSend,
- _noshare: marker::NoSync
}
-/// An immutable reference-counted pointer type.
-///
-/// See the [module level documentation](../index.html) for more details.
-#[unsafe_no_drop_flag]
-#[stable]
-#[cfg(not(stage0))] // NOTE remove cfg after next snapshot
-pub struct Rc<T> {
- // FIXME #12808: strange names to try to avoid interfering with field accesses of the contained
- // type via Deref
- _ptr: NonZero<*mut RcBox<T>>,
-}
-
-#[cfg(not(stage0))] // NOTE remove cfg after next snapshot
impl<T> !marker::Send for Rc<T> {}
-#[cfg(not(stage0))] // NOTE remove cfg after next snapshot
impl<T> !marker::Sync for Rc<T> {}
impl<T> Rc<T> {
- /// Constructs a new `Rc<T>`.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::rc::Rc;
- ///
- /// let five = Rc::new(5i);
- /// ```
- #[stable]
- #[cfg(stage0)] // NOTE remove after next snapshot
- pub fn new(value: T) -> Rc<T> {
- unsafe {
- Rc {
- // there is an implicit weak pointer owned by all the strong pointers, which
- // ensures that the weak destructor never frees the allocation while the strong
- // destructor is running, even if the weak pointer is stored inside the strong one.
- _ptr: NonZero::new(transmute(box RcBox {
- value: value,
- strong: Cell::new(1),
- weak: Cell::new(1)
- })),
- _nosend: marker::NoSend,
- _noshare: marker::NoSync
- }
- }
- }
/// Constructs a new `Rc<T>`.
///
/// let five = Rc::new(5i);
/// ```
#[stable]
- #[cfg(not(stage0))] // NOTE remove cfg after next snapshot
pub fn new(value: T) -> Rc<T> {
unsafe {
Rc {
///
/// let weak_five = five.downgrade();
/// ```
- #[cfg(stage0)] // NOTE remove after next snapshot
- #[unstable = "Weak pointers may not belong in this module"]
- pub fn downgrade(&self) -> Weak<T> {
- self.inc_weak();
- Weak {
- _ptr: self._ptr,
- _nosend: marker::NoSend,
- _noshare: marker::NoSync
- }
- }
-
- /// Downgrades the `Rc<T>` to a `Weak<T>` reference.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::rc::Rc;
- ///
- /// let five = Rc::new(5i);
- ///
- /// let weak_five = five.downgrade();
- /// ```
- #[cfg(not(stage0))] // NOTE remove cfg after next snapshot
#[unstable = "Weak pointers may not belong in this module"]
pub fn downgrade(&self) -> Weak<T> {
self.inc_weak();
#[stable]
impl<T> Clone for Rc<T> {
- /// Makes a clone of the `Rc<T>`.
- ///
- /// This increases the strong reference count.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::rc::Rc;
- ///
- /// let five = Rc::new(5i);
- ///
- /// five.clone();
- /// ```
- #[inline]
- #[cfg(stage0)] // NOTE remove after next snapshot
- fn clone(&self) -> Rc<T> {
- self.inc_strong();
- Rc { _ptr: self._ptr, _nosend: marker::NoSend, _noshare: marker::NoSync }
- }
/// Makes a clone of the `Rc<T>`.
///
/// five.clone();
/// ```
#[inline]
- #[cfg(not(stage0))] // NOTE remove cfg after next snapshot
fn clone(&self) -> Rc<T> {
self.inc_strong();
Rc { _ptr: self._ptr }
/// See the [module level documentation](../index.html) for more.
#[unsafe_no_drop_flag]
#[unstable = "Weak pointers may not belong in this module."]
-#[cfg(stage0)] // NOTE remove impl after next snapshot
pub struct Weak<T> {
// FIXME #12808: strange names to try to avoid interfering with
// field accesses of the contained type via Deref
_ptr: NonZero<*mut RcBox<T>>,
- _nosend: marker::NoSend,
- _noshare: marker::NoSync
}
-/// A weak version of `Rc<T>`.
-///
-/// Weak references do not count when determining if the inner value should be dropped.
-///
-/// See the [module level documentation](../index.html) for more.
-#[unsafe_no_drop_flag]
-#[unstable = "Weak pointers may not belong in this module."]
-#[cfg(not(stage0))] // NOTE remove cfg after next snapshot
-pub struct Weak<T> {
- // FIXME #12808: strange names to try to avoid interfering with
- // field accesses of the contained type via Deref
- _ptr: NonZero<*mut RcBox<T>>,
-}
-
-#[cfg(not(stage0))] // NOTE remove cfg after next snapshot
#[allow(unstable)]
impl<T> !marker::Send for Weak<T> {}
-#[cfg(not(stage0))] // NOTE remove cfg after next snapshot
#[allow(unstable)]
impl<T> !marker::Sync for Weak<T> {}
#[unstable = "Weak pointers may not belong in this module."]
impl<T> Weak<T> {
- /// Upgrades a weak reference to a strong reference.
- ///
- /// Upgrades the `Weak<T>` reference to an `Rc<T>`, if possible.
- ///
- /// Returns `None` if there were no strong references and the data was destroyed.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::rc::Rc;
- ///
- /// let five = Rc::new(5i);
- ///
- /// let weak_five = five.downgrade();
- ///
- /// let strong_five: Option<Rc<_>> = weak_five.upgrade();
- /// ```
- #[cfg(stage0)] // NOTE remove after next snapshot
- pub fn upgrade(&self) -> Option<Rc<T>> {
- if self.strong() == 0 {
- None
- } else {
- self.inc_strong();
- Some(Rc { _ptr: self._ptr, _nosend: marker::NoSend, _noshare: marker::NoSync })
- }
- }
/// Upgrades a weak reference to a strong reference.
///
///
/// let strong_five: Option<Rc<_>> = weak_five.upgrade();
/// ```
- #[cfg(not(stage0))] // NOTE remove cfg after next snapshot
pub fn upgrade(&self) -> Option<Rc<T>> {
if self.strong() == 0 {
None
#[unstable = "Weak pointers may not belong in this module."]
impl<T> Clone for Weak<T> {
- /// Makes a clone of the `Weak<T>`.
- ///
- /// This increases the weak reference count.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::rc::Rc;
- ///
- /// let weak_five = Rc::new(5i).downgrade();
- ///
- /// weak_five.clone();
- /// ```
- #[inline]
- #[cfg(stage0)] // NOTE remove after next snapshot
- fn clone(&self) -> Weak<T> {
- self.inc_weak();
- Weak { _ptr: self._ptr, _nosend: marker::NoSend, _noshare: marker::NoSync }
- }
/// Makes a clone of the `Weak<T>`.
///
/// weak_five.clone();
/// ```
#[inline]
- #[cfg(not(stage0))] // NOTE remove cfg after next snapshot
fn clone(&self) -> Weak<T> {
self.inc_weak();
Weak { _ptr: self._ptr }
fn strong(&self) -> uint { self.inner().strong.get() }
#[inline]
- fn inc_strong(&self) { self.inner().strong.set(self.strong() + 1); }
+ fn inc_strong(&self) {
+ let strong = self.strong();
+ // The reference count is always at least one unless we're about to drop the type
+ // This allows the bulk of the destructor to be omitted in cases where we know that
+ // the reference count must be > 0.
+ unsafe { assume(strong > 0); }
+ self.inner().strong.set(strong + 1);
+ }
#[inline]
- fn dec_strong(&self) { self.inner().strong.set(self.strong() - 1); }
+ fn dec_strong(&self) {
+ let strong = self.strong();
+ // The reference count is always at least one unless we're about to drop the type
+ // This allows the bulk of the destructor to be omitted in cases where we know that
+ // the reference count must be > 0
+ unsafe { assume(strong > 0); }
+ self.inner().strong.set(strong - 1);
+ }
#[inline]
fn weak(&self) -> uint { self.inner().weak.get() }
impl<T> RcBoxPtr<T> for Rc<T> {
#[inline(always)]
- fn inner(&self) -> &RcBox<T> { unsafe { &(**self._ptr) } }
+ fn inner(&self) -> &RcBox<T> {
+ unsafe {
+ // Safe to assume this here, as if it weren't true, we'd be breaking
+ // the contract anyway.
+ // This allows the null check to be elided in the destructor if we
+ // manipulated the reference count in the same function.
+ assume(!self._ptr.is_null());
+ &(**self._ptr)
+ }
+ }
}
impl<T> RcBoxPtr<T> for Weak<T> {
#[inline(always)]
- fn inner(&self) -> &RcBox<T> { unsafe { &(**self._ptr) } }
+ fn inner(&self) -> &RcBox<T> {
+ unsafe {
+ // Safe to assume this here, as if it weren't true, we'd be breaking
+ // the contract anyway
+ // This allows the null check to be elided in the destructor if we
+ // manipulated the reference count in the same function.
+ assume(!self._ptr.is_null());
+ &(**self._ptr)
+ }
+ }
}
#[cfg(test)]
if extra_bytes > 0 {
let mut last_word = 0u32;
- for (i, &byte) in bytes[(complete_words*4)..].iter().enumerate() {
+ for (i, &byte) in bytes[complete_words*4..].iter().enumerate() {
last_word |= (reverse_bits(byte) as u32) << (i * 8);
}
bitv.storage.push(last_word);
pub fn as_slices<'a>(&'a self) -> (&'a [K], &'a [V]) {
unsafe {(
mem::transmute(raw::Slice {
- data: self.keys.0 as *const K,
+ data: self.keys.0,
len: self.len()
}),
mem::transmute(raw::Slice {
- data: self.vals.0 as *const V,
+ data: self.vals.0,
len: self.len()
})
)}
} else {
unsafe {
mem::transmute(raw::Slice {
- data: self.edges.0 as *const Node<K, V>,
+ data: self.edges.0,
len: self.len() + 1
})
}
use core::default::Default;
use core::fmt::Show;
use core::fmt;
-// NOTE(stage0) remove import after a snapshot
-#[cfg(stage0)]
-use core::hash::Hash;
use core::iter::{Peekable, Map, FromIterator};
use core::ops::{BitOr, BitAnd, BitXor, Sub};
mod std {
pub use core::fmt; // necessary for panic!()
pub use core::option; // necessary for panic!()
- pub use core::clone; // deriving(Clone)
- pub use core::cmp; // deriving(Eq, Ord, etc.)
- pub use core::marker; // deriving(Copy)
- pub use core::hash; // deriving(Hash)
+ pub use core::clone; // derive(Clone)
+ pub use core::cmp; // derive(Eq, Ord, etc.)
+ pub use core::marker; // derive(Copy)
+ pub use core::hash; // derive(Hash)
}
#[cfg(test)]
#[macro_export]
#[stable]
macro_rules! vec {
- ($x:expr; $y:expr) => ({
- let xs: $crate::boxed::Box<[_]> = $crate::boxed::Box::new([$x; $y]);
- $crate::slice::SliceExt::into_vec(xs)
- });
- ($($x:expr),*) => ({
- let xs: $crate::boxed::Box<[_]> = $crate::boxed::Box::new([$($x),*]);
- $crate::slice::SliceExt::into_vec(xs)
- });
+ ($x:expr; $y:expr) => (
+ <[_] as $crate::slice::SliceExt>::into_vec(
+ $crate::boxed::Box::new([$x; $y]))
+ );
+ ($($x:expr),*) => (
+ <[_] as $crate::slice::SliceExt>::into_vec(
+ $crate::boxed::Box::new([$($x),*]))
+ );
($($x:expr,)*) => (vec![$($x),*])
}
/// Turn ptr into a slice
#[inline]
unsafe fn buffer_as_slice(&self) -> &[T] {
- mem::transmute(RawSlice { data: self.ptr as *const T, len: self.cap })
+ mem::transmute(RawSlice { data: self.ptr, len: self.cap })
}
/// Turn ptr into a mut slice
#[inline]
unsafe fn buffer_as_mut_slice(&mut self) -> &mut [T] {
- mem::transmute(RawSlice { data: self.ptr as *const T, len: self.cap })
+ mem::transmute(RawSlice { data: self.ptr, len: self.cap })
}
/// Moves an element out of the buffer
#[inline]
unsafe fn buffer_read(&mut self, off: uint) -> T {
- ptr::read(self.ptr.offset(off as int) as *const T)
+ ptr::read(self.ptr.offset(off as int))
}
/// Writes an element into the buffer, moving it.
&*buf_v.offset(j),
(i - j) as uint);
ptr::copy_nonoverlapping_memory(buf_v.offset(j),
- &tmp as *const T,
+ &tmp,
1);
mem::forget(tmp);
}
#[test]
fn test_chunksator() {
+ use core::iter::ExactSizeIterator;
+
let v = &[1i,2,3,4,5];
+ assert_eq!(v.chunks(2).len(), 3);
+
let chunks: &[&[int]] = &[&[1i,2], &[3,4], &[5]];
assert_eq!(v.chunks(2).collect::<Vec<&[int]>>(), chunks);
let chunks: &[&[int]] = &[&[1i,2,3], &[4,5]];
#[test]
fn test_mut_chunks() {
+ use core::iter::ExactSizeIterator;
+
let mut v = [0u8, 1, 2, 3, 4, 5, 6];
+ assert_eq!(v.chunks_mut(2).len(), 4);
for (i, chunk) in v.chunks_mut(3).enumerate() {
for x in chunk.iter_mut() {
*x = i as u8;
pub fn as_mut_slice<'a>(&'a mut self) -> &'a mut [T] {
unsafe {
mem::transmute(RawSlice {
- data: *self.ptr as *const T,
+ data: *self.ptr,
len: self.len,
})
}
let ptr = self.as_mut_ptr().offset(index as int);
// copy it out, unsafely having a copy of the value on
// the stack and in the vector at the same time.
- ret = ptr::read(ptr as *const T);
+ ret = ptr::read(ptr);
// Shift everything down to fill in that spot.
ptr::copy_memory(ptr, &*ptr.offset(1), len - index - 1);
// | |
// end_u end_t
- let t = ptr::read(pv.start_t as *const T);
+ let t = ptr::read(pv.start_t);
// start_u start_t
// | |
// +-+-+-+-+-+-+-+-+-+
fn as_slice<'a>(&'a self) -> &'a [T] {
unsafe {
mem::transmute(RawSlice {
- data: *self.ptr as *const T,
+ data: *self.ptr,
len: self.len
})
}
// We have instances of `U`s and `T`s in `vec`. Destruct them.
while self.start_u != self.end_u {
- let _ = ptr::read(self.start_u as *const U); // Run a `U` destructor.
+ let _ = ptr::read(self.start_u); // Run a `U` destructor.
self.start_u = self.start_u.offset(1);
}
while self.start_t != self.end_t {
- let _ = ptr::read(self.start_t as *const T); // Run a `T` destructor.
+ let _ = ptr::read(self.start_t); // Run a `T` destructor.
self.start_t = self.start_t.offset(1);
}
// After this destructor ran, the destructor of `vec` will run,
#[should_fail]
fn test_slice_out_of_bounds_1() {
let x: Vec<int> = vec![1, 2, 3, 4, 5];
- &x[(-1)..];
+ &x[-1..];
}
#[test]
#[should_fail]
fn test_slice_out_of_bounds_3() {
let x: Vec<int> = vec![1, 2, 3, 4, 5];
- &x[(-1)..4];
+ &x[-1..4];
}
#[test]
}
}
- /// Returns an iterator visiting all keys in ascending order by the keys.
+ /// Returns an iterator visiting all keys in ascending order of the keys.
/// The iterator's element type is `uint`.
#[stable]
pub fn keys<'r>(&'r self) -> Keys<'r, V> {
Keys { iter: self.iter().map(first) }
}
- /// Returns an iterator visiting all values in ascending order by the keys.
+ /// Returns an iterator visiting all values in ascending order of the keys.
/// The iterator's element type is `&'r V`.
#[stable]
pub fn values<'r>(&'r self) -> Values<'r, V> {
Values { iter: self.iter().map(second) }
}
- /// Returns an iterator visiting all key-value pairs in ascending order by the keys.
+ /// Returns an iterator visiting all key-value pairs in ascending order of the keys.
/// The iterator's element type is `(uint, &'r V)`.
///
/// # Examples
}
}
- /// Returns an iterator visiting all key-value pairs in ascending order by the keys,
+ /// Returns an iterator visiting all key-value pairs in ascending order of the keys,
/// with mutable references to the values.
/// The iterator's element type is `(uint, &'r mut V)`.
///
}
}
- /// Returns an iterator visiting all key-value pairs in ascending order by
- /// the keys, emptying (but not consuming) the original `VecMap`.
+ /// Returns an iterator visiting all key-value pairs in ascending order of
+ /// the keys, consuming the original `VecMap`.
/// The iterator's element type is `(uint, &'r V)`.
///
/// # Examples
/// map.insert(3, "c");
/// map.insert(2, "b");
///
- /// // Not possible with .iter()
/// let vec: Vec<(uint, &str)> = map.into_iter().collect();
///
/// assert_eq!(vec, vec![(1, "a"), (2, "b"), (3, "c")]);
/// ```
#[stable]
- pub fn into_iter(&mut self) -> IntoIter<V> {
+ pub fn into_iter(self) -> IntoIter<V> {
+ fn filter<A>((i, v): (uint, Option<A>)) -> Option<(uint, A)> {
+ v.map(|v| (i, v))
+ }
+ let filter: fn((uint, Option<V>)) -> Option<(uint, V)> = filter; // coerce to fn ptr
+
+ IntoIter { iter: self.v.into_iter().enumerate().filter_map(filter) }
+ }
+
+ /// Returns an iterator visiting all key-value pairs in ascending order of
+ /// the keys, emptying (but not consuming) the original `VecMap`.
+ /// The iterator's element type is `(uint, V)`. Keeps the allocated memory for reuse.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecMap;
+ ///
+ /// let mut map = VecMap::new();
+ /// map.insert(1, "a");
+ /// map.insert(3, "c");
+ /// map.insert(2, "b");
+ ///
+ /// let vec: Vec<(uint, &str)> = map.drain().collect();
+ ///
+ /// assert_eq!(vec, vec![(1, "a"), (2, "b"), (3, "c")]);
+ /// ```
+ #[unstable = "matches collection reform specification, waiting for dust to settle"]
+ pub fn drain<'a>(&'a mut self) -> Drain<'a, V> {
fn filter<A>((i, v): (uint, Option<A>)) -> Option<(uint, A)> {
v.map(|v| (i, v))
}
let filter: fn((uint, Option<V>)) -> Option<(uint, V)> = filter; // coerce to fn ptr
- let values = replace(&mut self.v, vec!());
- IntoIter { iter: values.into_iter().enumerate().filter_map(filter) }
+ Drain { iter: self.v.drain().enumerate().filter_map(filter) }
}
/// Return the number of elements in the map.
fn((uint, Option<V>)) -> Option<(uint, V)>>
}
+#[unstable]
+pub struct Drain<'a, V> {
+ iter: FilterMap<
+ (uint, Option<V>),
+ (uint, V),
+ Enumerate<vec::Drain<'a, Option<V>>>,
+ fn((uint, Option<V>)) -> Option<(uint, V)>>
+}
+
+#[unstable]
+impl<'a, V> Iterator for Drain<'a, V> {
+ type Item = (uint, V);
+
+ fn next(&mut self) -> Option<(uint, V)> { self.iter.next() }
+ fn size_hint(&self) -> (uint, Option<uint>) { self.iter.size_hint() }
+}
+
+#[unstable]
+impl<'a, V> DoubleEndedIterator for Drain<'a, V> {
+ fn next_back(&mut self) -> Option<(uint, V)> { self.iter.next_back() }
+}
+
#[stable]
impl<'a, V> Iterator for Keys<'a, V> {
type Item = uint;
assert_eq!(v, box 2i);
}
assert!(called);
- m.insert(2, box 1i);
+ }
+
+ #[test]
+ fn test_drain_iterator() {
+ let mut map = VecMap::new();
+ map.insert(1, "a");
+ map.insert(3, "c");
+ map.insert(2, "b");
+
+ let vec: Vec<(usize, &str)> = map.drain().collect();
+
+ assert_eq!(vec, vec![(1, "a"), (2, "b"), (3, "c")]);
+ assert_eq!(map.len(), 0);
}
#[test]
use clone::Clone;
use cmp::{PartialEq, Eq, PartialOrd, Ord, Ordering};
use fmt;
+use hash::{Hash, Hasher, self};
use marker::Copy;
use ops::{Deref, FullRange};
use option::Option;
}
}
+ impl<S: hash::Writer + Hasher, T: Hash<S>> Hash<S> for [T; $N] {
+ fn hash(&self, state: &mut S) {
+ Hash::hash(&self[], state)
+ }
+ }
+
#[unstable = "waiting for Show to stabilize"]
impl<T:fmt::Show> fmt::Show for [T; $N] {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
#[inline]
#[stable]
pub fn load(&self, order: Ordering) -> bool {
- unsafe { atomic_load(self.v.get() as *const usize, order) > 0 }
+ unsafe { atomic_load(self.v.get(), order) > 0 }
}
/// Stores a value into the bool.
/// ```
#[inline]
pub fn load(&self, order: Ordering) -> isize {
- unsafe { atomic_load(self.v.get() as *const isize, order) }
+ unsafe { atomic_load(self.v.get(), order) }
}
/// Stores a value into the isize.
/// ```
#[inline]
pub fn load(&self, order: Ordering) -> usize {
- unsafe { atomic_load(self.v.get() as *const usize, order) }
+ unsafe { atomic_load(self.v.get(), order) }
}
/// Stores a value into the usize.
#[stable]
pub fn load(&self, order: Ordering) -> *mut T {
unsafe {
- atomic_load(self.p.get() as *const *mut T, order) as *mut T
+ atomic_load(self.p.get(), order) as *mut T
}
}
#[inline]
pub fn load(&self, order: Ordering) -> int {
- unsafe { atomic_load(self.v.get() as *const int, order) }
+ unsafe { atomic_load(self.v.get(), order) }
}
#[inline]
#[inline]
pub fn load(&self, order: Ordering) -> uint {
- unsafe { atomic_load(self.v.get() as *const uint, order) }
+ unsafe { atomic_load(self.v.get(), order) }
}
#[inline]
fn size_hint(&self) -> (uint, Option<uint>) { (0, None) }
}
+// FIXME(#21363) remove `old_impl_check` when bug is fixed
+#[old_impl_check]
+impl<'a, T> Iterator for &'a mut (Iterator<Item=T> + 'a) {
+ type Item = T;
+
+ fn next(&mut self) -> Option<T> {
+ (**self).next()
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ (**self).size_hint()
+ }
+}
+
/// Conversion from an `Iterator`
#[stable]
#[rustc_on_unimplemented="a collection of type `{Self}` cannot be \
#![feature(unboxed_closures)]
#![allow(unknown_features)] #![feature(int_uint)]
#![feature(on_unimplemented)]
+// FIXME(#21363) remove `old_impl_check` when bug is fixed
+#![feature(old_impl_check)]
#![deny(missing_docs)]
#[macro_use]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub struct InvariantLifetime<'a>;
-/// A type which is considered "not sendable", meaning that it cannot
-/// be safely sent between tasks, even if it is owned. This is
-/// typically embedded in other types, such as `Gc`, to ensure that
-/// their instances remain thread-local.
-#[unstable = "likely to change with new variance strategy"]
-#[lang="no_send_bound"]
-#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
-#[cfg(stage0)] // NOTE remove impl after next snapshot
-pub struct NoSend;
-
/// A type which is considered "not POD", meaning that it is not
/// implicitly copyable. This is typically embedded in other types to
/// ensure that they are never copied, even if they lack a destructor.
#[allow(missing_copy_implementations)]
pub struct NoCopy;
-/// A type which is considered "not sync", meaning that
-/// its contents are not threadsafe, hence they cannot be
-/// shared between tasks.
-#[unstable = "likely to change with new variance strategy"]
-#[lang="no_sync_bound"]
-#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
-#[cfg(stage0)] // NOTE remove impl after next snapshot
-pub struct NoSync;
-
/// A type which is considered managed by the GC. This is typically
/// embedded in other types.
#[unstable = "likely to change with new variance strategy"]
//! Implementing these traits allows you to get an effect similar to
//! overloading operators.
//!
-//! The values for the right hand side of an operator are automatically
-//! borrowed, so `a + b` is sugar for `a.add(&b)`.
-//!
-//! All of these traits are imported by the prelude, so they are available in
+//! Some of these traits are imported by the prelude, so they are available in
//! every Rust program.
//!
+//! Many of the operators take their operands by value. In non-generic
+//! contexts involving built-in types, this is usually not a problem.
+//! However, using these operators in generic code requires some
+//! attention if values have to be reused as opposed to letting the operators
+//! consume them. One option is to occasionally use `clone()`.
+//! Another option is to rely on the types involved providing additional
+//! operator implementations for references. For example, for a user-defined
+//! type `T` which is supposed to support addition, it is probably a good
+//! idea to have both `T` and `&T` implement the traits `Add<T>` and `Add<&T>`
+//! so that generic code can be written without unnecessary cloning.
+//!
//! # Example
//!
//! This example creates a `Point` struct that implements `Add` and `Sub`, and then
//! demonstrates adding and subtracting two `Point`s.
//!
//! ```rust
-//! #![feature(associated_types)]
-//!
//! use std::ops::{Add, Sub};
//!
//! #[derive(Show)]
fn drop(&mut self);
}
+// implements the unary operator "op &T"
+// based on "op T" where T is expected to be `Copy`able
+macro_rules! forward_ref_unop {
+ (impl $imp:ident, $method:ident for $t:ty) => {
+ #[unstable = "recently added, waiting for dust to settle"]
+ impl<'a> $imp for &'a $t {
+ type Output = <$t as $imp>::Output;
+
+ #[inline]
+ fn $method(self) -> <$t as $imp>::Output {
+ $imp::$method(*self)
+ }
+ }
+ }
+}
+
+// implements binary operators "&T op U", "T op &U", "&T op &U"
+// based on "T op U" where T and U are expected to be `Copy`able
+macro_rules! forward_ref_binop {
+ (impl $imp:ident, $method:ident for $t:ty, $u:ty) => {
+ #[unstable = "recently added, waiting for dust to settle"]
+ impl<'a> $imp<$u> for &'a $t {
+ type Output = <$t as $imp<$u>>::Output;
+
+ #[inline]
+ fn $method(self, other: $u) -> <$t as $imp<$u>>::Output {
+ $imp::$method(*self, other)
+ }
+ }
+
+ #[unstable = "recently added, waiting for dust to settle"]
+ impl<'a> $imp<&'a $u> for $t {
+ type Output = <$t as $imp<$u>>::Output;
+
+ #[inline]
+ fn $method(self, other: &'a $u) -> <$t as $imp<$u>>::Output {
+ $imp::$method(self, *other)
+ }
+ }
+
+ #[unstable = "recently added, waiting for dust to settle"]
+ impl<'a, 'b> $imp<&'a $u> for &'b $t {
+ type Output = <$t as $imp<$u>>::Output;
+
+ #[inline]
+ fn $method(self, other: &'a $u) -> <$t as $imp<$u>>::Output {
+ $imp::$method(*self, *other)
+ }
+ }
+ }
+}
+
/// The `Add` trait is used to specify the functionality of `+`.
///
/// # Example
/// calling `add`, and therefore, `main` prints `Adding!`.
///
/// ```rust
-/// #![feature(associated_types)]
-///
/// use std::ops::Add;
///
/// #[derive(Copy)]
#[inline]
fn add(self, other: $t) -> $t { self + other }
}
+
+ forward_ref_binop! { impl Add, add for $t, $t }
)*)
}
/// calling `sub`, and therefore, `main` prints `Subtracting!`.
///
/// ```rust
-/// #![feature(associated_types)]
-///
/// use std::ops::Sub;
///
/// #[derive(Copy)]
#[inline]
fn sub(self, other: $t) -> $t { self - other }
}
+
+ forward_ref_binop! { impl Sub, sub for $t, $t }
)*)
}
/// calling `mul`, and therefore, `main` prints `Multiplying!`.
///
/// ```rust
-/// #![feature(associated_types)]
-///
/// use std::ops::Mul;
///
/// #[derive(Copy)]
#[inline]
fn mul(self, other: $t) -> $t { self * other }
}
+
+ forward_ref_binop! { impl Mul, mul for $t, $t }
)*)
}
/// calling `div`, and therefore, `main` prints `Dividing!`.
///
/// ```
-/// #![feature(associated_types)]
-///
/// use std::ops::Div;
///
/// #[derive(Copy)]
#[inline]
fn div(self, other: $t) -> $t { self / other }
}
+
+ forward_ref_binop! { impl Div, div for $t, $t }
)*)
}
/// calling `rem`, and therefore, `main` prints `Remainder-ing!`.
///
/// ```
-/// #![feature(associated_types)]
-///
/// use std::ops::Rem;
///
/// #[derive(Copy)]
#[inline]
fn rem(self, other: $t) -> $t { self % other }
}
+
+ forward_ref_binop! { impl Rem, rem for $t, $t }
)*)
}
unsafe { $fmod(self, other) }
}
}
+
+ forward_ref_binop! { impl Rem, rem for $t, $t }
}
}
/// `neg`, and therefore, `main` prints `Negating!`.
///
/// ```
-/// #![feature(associated_types)]
-///
/// use std::ops::Neg;
///
/// struct Foo;
#[stable]
fn neg(self) -> $t { -self }
}
+
+ forward_ref_unop! { impl Neg, neg for $t }
)*)
}
#[inline]
fn neg(self) -> $t { -(self as $t_signed) as $t }
}
+
+ forward_ref_unop! { impl Neg, neg for $t }
}
}
/// `not`, and therefore, `main` prints `Not-ing!`.
///
/// ```
-/// #![feature(associated_types)]
-///
/// use std::ops::Not;
///
/// struct Foo;
#[inline]
fn not(self) -> $t { !self }
}
+
+ forward_ref_unop! { impl Not, not for $t }
)*)
}
/// calling `bitand`, and therefore, `main` prints `Bitwise And-ing!`.
///
/// ```
-/// #![feature(associated_types)]
-///
/// use std::ops::BitAnd;
///
/// #[derive(Copy)]
#[inline]
fn bitand(self, rhs: $t) -> $t { self & rhs }
}
+
+ forward_ref_binop! { impl BitAnd, bitand for $t, $t }
)*)
}
/// calling `bitor`, and therefore, `main` prints `Bitwise Or-ing!`.
///
/// ```
-/// #![feature(associated_types)]
-///
/// use std::ops::BitOr;
///
/// #[derive(Copy)]
#[inline]
fn bitor(self, rhs: $t) -> $t { self | rhs }
}
+
+ forward_ref_binop! { impl BitOr, bitor for $t, $t }
)*)
}
/// calling `bitxor`, and therefore, `main` prints `Bitwise Xor-ing!`.
///
/// ```
-/// #![feature(associated_types)]
-///
/// use std::ops::BitXor;
///
/// #[derive(Copy)]
#[inline]
fn bitxor(self, other: $t) -> $t { self ^ other }
}
+
+ forward_ref_binop! { impl BitXor, bitxor for $t, $t }
)*)
}
/// calling `shl`, and therefore, `main` prints `Shifting left!`.
///
/// ```
-/// #![feature(associated_types)]
-///
/// use std::ops::Shl;
///
/// #[derive(Copy)]
self << other
}
}
+
+ forward_ref_binop! { impl Shl, shl for $t, $f }
)
}
/// calling `shr`, and therefore, `main` prints `Shifting right!`.
///
/// ```
-/// #![feature(associated_types)]
-///
/// use std::ops::Shr;
///
/// #[derive(Copy)]
self >> other
}
}
+
+ forward_ref_binop! { impl Shr, shr for $t, $f }
)
}
}
/// An unbounded range.
-#[derive(Copy, PartialEq, Eq)]
+#[derive(Copy, Clone, PartialEq, Eq)]
#[lang="full_range"]
#[unstable = "API still in development"]
pub struct FullRange;
}
/// A (half-open) range which is bounded at both ends.
-#[derive(Copy, PartialEq, Eq)]
+#[derive(Copy, Clone, PartialEq, Eq)]
#[lang="range"]
#[unstable = "API still in development"]
pub struct Range<Idx> {
}
/// A range which is only bounded below.
-#[derive(Copy, PartialEq, Eq)]
+#[derive(Copy, Clone, PartialEq, Eq)]
#[lang="range_from"]
#[unstable = "API still in development"]
pub struct RangeFrom<Idx> {
}
/// A range which is only bounded above.
-#[derive(Copy, PartialEq, Eq)]
+#[derive(Copy, Clone, PartialEq, Eq)]
#[lang="range_to"]
#[unstable = "API still in development"]
pub struct RangeTo<Idx> {
/// struct.
///
/// ```
-/// #![feature(associated_types)]
-///
/// use std::ops::Deref;
///
/// struct DerefExample<T> {
/// struct.
///
/// ```
-/// #![feature(associated_types)]
-///
/// use std::ops::{Deref, DerefMut};
///
/// struct DerefMutExample<T> {
#[inline]
#[stable]
unsafe fn offset(self, count: int) -> *mut T {
- intrinsics::offset(self as *const T, count) as *mut T
+ intrinsics::offset(self, count) as *mut T
}
#[inline]
#[inline]
fn init(&self) -> &[T] {
- &self[..(self.len() - 1)]
+ &self[..self.len() - 1]
}
#[inline]
#[inline]
fn ends_with(&self, needle: &[T]) -> bool where T: PartialEq {
let (m, n) = (self.len(), needle.len());
- m >= n && needle == &self[(m-n)..]
+ m >= n && needle == &self[m-n..]
}
#[unstable]
diff / mem::size_of::<$t>()
};
unsafe {
- transmute::<_, $result>(RawSlice { data: $start as *const T, len: len })
+ transmute::<_, $result>(RawSlice { data: $start, len: len })
}
}}
}
None => self.finish(),
Some(idx) => {
let ret = Some(&self.v[..idx]);
- self.v = &self.v[(idx + 1)..];
+ self.v = &self.v[idx + 1..];
ret
}
}
match self.v.iter().rposition(|x| (self.pred)(x)) {
None => self.finish(),
Some(idx) => {
- let ret = Some(&self.v[(idx + 1)..]);
+ let ret = Some(&self.v[idx + 1..]);
self.v = &self.v[..idx];
ret
}
}
}
+#[stable]
+impl<'a, T> ExactSizeIterator for Chunks<'a, T> {}
+
#[unstable = "trait is experimental"]
impl<'a, T> RandomAccessIterator for Chunks<'a, T> {
#[inline]
}
}
+#[stable]
+impl<'a, T> ExactSizeIterator for ChunksMut<'a, T> {}
//
// Free functions
#[inline]
#[unstable = "should be renamed to from_raw_parts_mut"]
pub unsafe fn from_raw_mut_buf<'a, T>(p: &'a *mut T, len: uint) -> &'a mut [T] {
- transmute(RawSlice { data: *p as *const T, len: len })
+ transmute(RawSlice { data: *p, len: len })
}
//
*/
impl TwoWaySearcher {
fn new(needle: &[u8]) -> TwoWaySearcher {
- let (crit_pos1, period1) = TwoWaySearcher::maximal_suffix(needle, false);
- let (crit_pos2, period2) = TwoWaySearcher::maximal_suffix(needle, true);
-
- let crit_pos;
- let period;
- if crit_pos1 > crit_pos2 {
- crit_pos = crit_pos1;
- period = period1;
- } else {
- crit_pos = crit_pos2;
- period = period2;
- }
+ let (crit_pos_false, period_false) = TwoWaySearcher::maximal_suffix(needle, false);
+ let (crit_pos_true, period_true) = TwoWaySearcher::maximal_suffix(needle, true);
+
+ let (crit_pos, period) =
+ if crit_pos_false > crit_pos_true {
+ (crit_pos_false, period_false)
+ } else {
+ (crit_pos_true, period_true)
+ };
// This isn't in the original algorithm, as far as I'm aware.
let byteset = needle.iter()
/// Any string that can be represented as a slice
#[unstable = "Instead of taking this bound generically, this trait will be \
- replaced with one of slicing syntax, deref coercions, or \
+ replaced with one of slicing syntax (&foo[]), deref coercions, or \
a more generic conversion trait"]
pub trait Str {
/// Work with `self` as a slice.
#[inline]
fn ends_with(&self, needle: &str) -> bool {
let (m, n) = (self.len(), needle.len());
- m >= n && needle.as_bytes() == &self.as_bytes()[(m-n)..]
+ m >= n && needle.as_bytes() == &self.as_bytes()[m-n..]
}
#[inline]
fn test_double_ended_flat_map() {
let u = [0u,1];
let v = [5u,6,7,8];
- let mut it = u.iter().flat_map(|x| v[(*x)..v.len()].iter());
+ let mut it = u.iter().flat_map(|x| v[*x..v.len()].iter());
assert_eq!(it.next_back().unwrap(), &8);
assert_eq!(it.next().unwrap(), &5);
assert_eq!(it.next_back().unwrap(), &7);
(B, Cr, UnderLim) => { B }
(B, Cr, OverLim) if (i - last_start + 1) > lim
=> panic!("word starting with {} longer than limit!",
- &ss[last_start..(i + 1)]),
+ &ss[last_start..i + 1]),
(B, Cr, OverLim) => {
*cont = it(&ss[slice_start..last_end]);
slice_start = last_start;
pub type DWORDLONG = c_ulonglong;
pub type HANDLE = LPVOID;
- pub type HMODULE = c_uint;
+ pub type HINSTANCE = HANDLE;
+ pub type HMODULE = HINSTANCE;
pub type LONG = c_long;
pub type PLONG = *mut c_long;
use types::os::arch::c95::c_int;
use types::os::common::posix01::sighandler_t;
- #[cfg(not(target_os = "android"))]
+ #[cfg(not(all(target_os = "android", target_arch = "arm")))]
extern {
pub fn signal(signum: c_int,
handler: sighandler_t) -> sighandler_t;
}
- #[cfg(target_os = "android")]
+ #[cfg(all(target_os = "android", target_arch = "arm"))]
extern {
#[link_name = "bsd_signal"]
pub fn signal(signum: c_int,
};
self.chari = closer;
let greed = try!(self.get_next_greedy());
- let inner = self.chars[(start+1)..closer].iter().cloned()
+ let inner = self.chars[start+1..closer].iter().cloned()
.collect::<String>();
// Parse the min and max values from the regex.
#![allow(non_snake_case)]
-register_diagnostic! { E0001, r##"
+register_long_diagnostics! {
+ E0001: r##"
This error suggests that the expression arm corresponding to the noted pattern
will never be reached as for all possible values of the expression being matched,
one of the preceding patterns will match.
This means that perhaps some of the preceding patterns are too general, this
one is too specific or the ordering is incorrect.
-"## }
+"##,
+
+ E0003: r##"
+  Not-a-Number (NaN) values cannot be compared for equality and hence can never match
+ the input to a match expression. To match against NaN values, you should instead use
+ the `is_nan` method in a guard, as in: x if x.is_nan() => ...
+"##,
+
+ E0004: r##"
+  This error indicates that the compiler cannot guarantee a matching pattern for one
+ or more possible inputs to a match expression. Guaranteed matches are required in order
+ to assign values to match expressions, or alternatively, determine the flow of execution.
+
+ If you encounter this error you must alter your patterns so that every possible value of
+ the input type is matched. For types with a small number of variants (like enums) you
+ should probably cover all cases explicitly. Alternatively, the underscore `_` wildcard
+ pattern can be added after all other patterns to match "anything else".
+"##,
+
+ // FIXME: Remove duplication here?
+ E0005: r##"
+ Patterns used to bind names must be irrefutable, that is, they must guarantee that a
+ name will be extracted in all cases. If you encounter this error you probably need
+ to use a `match` or `if let` to deal with the possibility of failure.
+"##,
+
+ E0006: r##"
+ Patterns used to bind names must be irrefutable, that is, they must guarantee that a
+ name will be extracted in all cases. If you encounter this error you probably need
+ to use a `match` or `if let` to deal with the possibility of failure.
+"##
+}
register_diagnostics! {
E0002,
- E0003,
- E0004,
- E0005,
- E0006,
E0007,
E0008,
E0009,
E0010,
E0011,
E0012,
- E0013,
E0014,
E0015,
E0016,
E0137,
E0138,
E0139,
- E0140,
E0152,
- E0153,
- E0157,
E0158,
E0161,
E0162,
E0165,
- E0166,
- E0167,
- E0168,
- E0169,
E0170,
- E0171,
- E0172,
- E0173,
- E0174,
- E0177,
- E0178,
- E0179
+ E0261, // use of undeclared lifetime name
+ E0262, // illegal lifetime parameter name
+ E0263, // lifetime name declared twice in same scope
+ E0264, // unknown external lang item
+ E0265, // recursive constant
+ E0266, // expected item
+ E0267, // thing inside of a closure
+ E0268, // thing outside of a loop
+ E0269, // not all control paths return a value
+ E0270, // computation may converge in a function marked as diverging
+ E0271, // type mismatch resolving
+ E0272, // rustc_on_unimplemented attribute refers to non-existent type parameter
+ E0273, // rustc_on_unimplemented must have named format arguments
+ E0274, // rustc_on_unimplemented must have a value
+ E0275, // overflow evaluating requirement
+ E0276, // requirement appears on impl method but not on corresponding trait method
+ E0277, // trait is not implemented for type
+ E0278, // requirement is not satisfied
+ E0279, // requirement is not satisfied
+ E0280, // requirement is not satisfied
+ E0281, // type implements trait but other trait is required
+ E0282, // unable to infer enough type information about
+ E0283, // cannot resolve type
+ E0284, // cannot resolve type
+ E0285, // overflow evaluation builtin bounds
+ E0296, // malformed recursion limit attribute
+ E0297, // refutable pattern in for loop binding
+ E0298, // mismatched types between arms
+ E0299, // mismatched types between arms
+ E0300, // unexpanded macro
+  E0301, // cannot mutably borrow in a pattern guard
+ E0302, // cannot assign in a pattern guard
+ E0303, // pattern bindings are not allowed after an `@`
+ E0304, // expected signed integer constant
+ E0305, // expected constant
+ E0306, // expected positive integer for repeat count
+ E0307, // expected constant integer for repeat count
+ E0308,
+ E0309, // thing may not live long enough
+ E0310, // thing may not live long enough
+ E0311, // thing may not live long enough
+ E0312, // lifetime of reference outlives lifetime of borrowed content
+ E0313, // lifetime of borrowed pointer outlives lifetime of captured variable
+ E0314, // closure outlives stack frame
+ E0315 // cannot invoke closure outside of its lifetime
}
+
+__build_diagnostic_array! { DIAGNOSTICS }
+
pub use rustc_llvm as llvm;
-mod diagnostics;
+// NB: This module needs to be declared first so diagnostics are
+// registered before they are used.
+pub mod diagnostics;
pub mod back {
pub use rustc_back::abi;
pub use llvm;
}
-__build_diagnostic_array! { DIAGNOSTICS }
-
// A private module so that macro-expanded idents like
// `::rustc::lint::Lint` will also work in `rustc` itself.
//
let ident = path1.node;
if let ast::BindByValue(ast::MutMutable) = mode {
if !token::get_ident(ident).get().starts_with("_") {
- match mutables.entry(ident.name.uint()) {
+ match mutables.entry(ident.name.usize()) {
Vacant(entry) => { entry.insert(vec![id]); },
Occupied(mut entry) => { entry.get_mut().push(id); },
}
let mut ret = None;
reader::tagged_docs(tagged_doc.doc, belt, |elt| {
let pos = u64_from_be_bytes(elt.data, elt.start, 4) as uint;
- if eq_fn(&elt.data[(elt.start + 4) .. elt.end]) {
+ if eq_fn(&elt.data[elt.start + 4 .. elt.end]) {
ret = Some(reader::doc_at(d.data, pos).unwrap().doc);
false
} else {
}
}
-fn parse_trait_store_<F>(st: &mut PState, conv: &mut F) -> ty::TraitStore where
- F: FnMut(DefIdSource, ast::DefId) -> ast::DefId,
-{
- match next(st) {
- '~' => ty::UniqTraitStore,
- '&' => ty::RegionTraitStore(parse_region_(st, conv), parse_mutability(st)),
- c => {
- st.tcx.sess.bug(&format!("parse_trait_store(): bad input '{}'",
- c)[])
- }
- }
-}
-
fn parse_vec_per_param_space<'a, 'tcx, T, F>(st: &mut PState<'a, 'tcx>,
mut f: F)
-> VecPerParamSpace<T> where
})
}
-fn parse_onceness(c: char) -> ast::Onceness {
- match c {
- 'o' => ast::Once,
- 'm' => ast::Many,
- _ => panic!("parse_onceness: bad onceness")
- }
-}
-
fn parse_closure_ty<'a, 'tcx, F>(st: &mut PState<'a, 'tcx>,
mut conv: F) -> ty::ClosureTy<'tcx> where
F: FnMut(DefIdSource, ast::DefId) -> ast::DefId,
F: FnMut(DefIdSource, ast::DefId) -> ast::DefId,
{
let unsafety = parse_unsafety(next(st));
- let onceness = parse_onceness(next(st));
- let store = parse_trait_store_(st, conv);
- let bounds = parse_existential_bounds_(st, conv);
let sig = parse_sig_(st, conv);
let abi = parse_abi_set(st);
ty::ClosureTy {
unsafety: unsafety,
- onceness: onceness,
- store: store,
- bounds: bounds,
sig: sig,
abi: abi,
}
}
let crate_part = &buf[0u..colon_idx];
- let def_part = &buf[(colon_idx + 1u)..len];
+ let def_part = &buf[colon_idx + 1u..len];
let crate_num = match str::from_utf8(crate_part).ok().and_then(|s| s.parse::<uint>()) {
Some(cn) => cn as ast::CrateNum,
enc_substs(w, cx, s.substs);
}
-pub fn enc_trait_store(w: &mut SeekableMemWriter, cx: &ctxt, s: ty::TraitStore) {
- match s {
- ty::UniqTraitStore => mywrite!(w, "~"),
- ty::RegionTraitStore(re, m) => {
- mywrite!(w, "&");
- enc_region(w, cx, re);
- enc_mutability(w, m);
- }
- }
-}
-
fn enc_unsafety(w: &mut SeekableMemWriter, p: ast::Unsafety) {
match p {
ast::Unsafety::Normal => mywrite!(w, "n"),
mywrite!(w, "]")
}
-fn enc_onceness(w: &mut SeekableMemWriter, o: ast::Onceness) {
- match o {
- ast::Once => mywrite!(w, "o"),
- ast::Many => mywrite!(w, "m")
- }
-}
-
pub fn enc_bare_fn_ty<'a, 'tcx>(w: &mut SeekableMemWriter, cx: &ctxt<'a, 'tcx>,
ft: &ty::BareFnTy<'tcx>) {
enc_unsafety(w, ft.unsafety);
pub fn enc_closure_ty<'a, 'tcx>(w: &mut SeekableMemWriter, cx: &ctxt<'a, 'tcx>,
ft: &ty::ClosureTy<'tcx>) {
enc_unsafety(w, ft.unsafety);
- enc_onceness(w, ft.onceness);
- enc_trait_store(w, cx, ft.store);
- enc_existential_bounds(w, cx, &ft.bounds);
enc_fn_sig(w, cx, &ft.sig);
enc_abi(w, ft.abi);
}
}
}
-impl tr for ty::TraitStore {
- fn tr(&self, dcx: &DecodeContext) -> ty::TraitStore {
- match *self {
- ty::RegionTraitStore(r, m) => {
- ty::RegionTraitStore(r.tr(dcx), m)
- }
- ty::UniqTraitStore => ty::UniqTraitStore
- }
- }
-}
-
// ______________________________________________________________________
// Encoding and decoding of freevar information
match self.cx {
Loop => {}
Closure => {
- self.sess.span_err(span,
- &format!("`{}` inside of a closure", name)[]);
+ span_err!(self.sess, span, E0267,
+ "`{}` inside of a closure", name);
}
Normal => {
- self.sess.span_err(span,
- &format!("`{}` outside of loop", name)[]);
+ span_err!(self.sess, span, E0268,
+ "`{}` outside of loop", name);
}
}
}
ast::ExprForLoop(ref pat, _, _, _) => {
let mut static_inliner = StaticInliner::new(cx.tcx);
is_refutable(cx, &*static_inliner.fold_pat((*pat).clone()), |uncovered_pat| {
- cx.tcx.sess.span_err(
- pat.span,
- &format!("refutable pattern in `for` loop binding: \
+ span_err!(cx.tcx.sess, pat.span, E0297,
+ "refutable pattern in `for` loop binding: \
`{}` not covered",
- pat_to_string(uncovered_pat))[]);
+ pat_to_string(uncovered_pat));
});
// Check legality of move bindings.
Some(true) => Some(vec![]),
Some(false) => None,
None => {
- cx.tcx.sess.span_err(pat_span, "mismatched types between arms");
+ span_err!(cx.tcx.sess, pat_span, E0298, "mismatched types between arms");
None
}
}
Some(true) => Some(vec![]),
Some(false) => None,
None => {
- cx.tcx.sess.span_err(pat_span, "mismatched types between arms");
+ span_err!(cx.tcx.sess, pat_span, E0299, "mismatched types between arms");
None
}
}
}
ast::PatMac(_) => {
- cx.tcx.sess.span_err(pat_span, "unexpanded macro");
+ span_err!(cx.tcx.sess, pat_span, E0300, "unexpanded macro");
None
}
};
head.map(|mut head| {
head.push_all(&r[..col]);
- head.push_all(&r[(col + 1)..]);
+ head.push_all(&r[col + 1..]);
head
})
}
_: LoanCause) {
match kind {
MutBorrow => {
- self.cx
- .tcx
- .sess
- .span_err(span,
- "cannot mutably borrow in a pattern guard")
+ span_err!(self.cx.tcx.sess, span, E0301,
+ "cannot mutably borrow in a pattern guard")
}
ImmBorrow | UniqueImmBorrow => {}
}
fn mutate(&mut self, _: NodeId, span: Span, _: cmt, mode: MutateMode) {
match mode {
JustWrite | WriteAndRead => {
- self.cx
- .tcx
- .sess
- .span_err(span, "cannot assign in a pattern guard")
+ span_err!(self.cx.tcx.sess, span, E0302, "cannot assign in a pattern guard")
}
Init => {}
}
impl<'a, 'b, 'tcx, 'v> Visitor<'v> for AtBindingPatternVisitor<'a, 'b, 'tcx> {
fn visit_pat(&mut self, pat: &Pat) {
if !self.bindings_allowed && pat_is_binding(&self.cx.tcx.def_map, pat) {
- self.cx.tcx.sess.span_err(pat.span,
+ span_err!(self.cx.tcx.sess, pat.span, E0303,
"pattern bindings are not allowed \
after an `@`");
}
impl<'a, 'ast, 'v> Visitor<'v> for CheckItemRecursionVisitor<'a, 'ast> {
fn visit_item(&mut self, it: &ast::Item) {
if self.idstack.iter().any(|x| x == &(it.id)) {
- self.sess.span_err(self.root_it.span, "recursive constant");
+ span_err!(self.sess, self.root_it.span, E0265, "recursive constant");
return;
}
self.idstack.push(it.id);
self.visit_item(item),
ast_map::NodeForeignItem(_) => {},
_ => {
- self.sess.span_err(e.span,
- &format!("expected item, found {}",
- self.ast_map.node_to_string(def_id.node))[]);
+ span_err!(self.sess, e.span, E0266,
+ "expected item, found {}",
+ self.ast_map.node_to_string(def_id.node));
return;
},
}
let inner_ty = match a.sty {
ty::ty_uniq(_) => return Err(ty::terr_mismatch),
- ty::ty_rptr(_, mt_a) => mt_a.ty,
+ ty::ty_rptr(_, mt_a) => {
+ if !can_coerce_mutbls(mt_a.mutbl, mutbl_b) {
+ return Err(ty::terr_mutability);
+ }
+ mt_a.ty
+ }
_ => {
return self.subtype(a, b);
}
sig: sig})
}
- fn closure_tys(&self, a: &ty::ClosureTy<'tcx>,
- b: &ty::ClosureTy<'tcx>) -> cres<'tcx, ty::ClosureTy<'tcx>> {
-
- let store = match (a.store, b.store) {
- (ty::RegionTraitStore(a_r, a_m),
- ty::RegionTraitStore(b_r, b_m)) if a_m == b_m => {
- let r = try!(self.contraregions(a_r, b_r));
- ty::RegionTraitStore(r, a_m)
- }
-
- _ if a.store == b.store => {
- a.store
- }
-
- _ => {
- return Err(ty::terr_sigil_mismatch(expected_found(self, a.store, b.store)))
- }
- };
- let unsafety = try!(self.unsafeties(a.unsafety, b.unsafety));
- let onceness = try!(self.oncenesses(a.onceness, b.onceness));
- let bounds = try!(self.existential_bounds(&a.bounds, &b.bounds));
- let sig = try!(self.binders(&a.sig, &b.sig));
- let abi = try!(self.abi(a.abi, b.abi));
- Ok(ty::ClosureTy {
- unsafety: unsafety,
- onceness: onceness,
- store: store,
- bounds: bounds,
- sig: sig,
- abi: abi,
- })
- }
-
fn fn_sigs(&self, a: &ty::FnSig<'tcx>, b: &ty::FnSig<'tcx>) -> cres<'tcx, ty::FnSig<'tcx>> {
if a.variadic != b.variadic {
return Err(ty::terr_variadic_mismatch(expected_found(self, a.variadic, b.variadic)));
fn regions(&self, a: ty::Region, b: ty::Region) -> cres<'tcx, ty::Region>;
- fn trait_stores(&self,
- vk: ty::terr_vstore_kind,
- a: ty::TraitStore,
- b: ty::TraitStore)
- -> cres<'tcx, ty::TraitStore> {
- debug!("{}.trait_stores(a={:?}, b={:?})", self.tag(), a, b);
-
- match (a, b) {
- (ty::RegionTraitStore(a_r, a_m),
- ty::RegionTraitStore(b_r, b_m)) if a_m == b_m => {
- self.contraregions(a_r, b_r).and_then(|r| {
- Ok(ty::RegionTraitStore(r, a_m))
- })
- }
-
- _ if a == b => {
- Ok(a)
- }
-
- _ => {
- Err(ty::terr_trait_stores_differ(vk, expected_found(self, a, b)))
- }
- }
- }
-
fn trait_refs(&self,
a: &ty::TraitRef<'tcx>,
b: &ty::TraitRef<'tcx>)
infer::EquatePredicate(_) => "equality predicate not satisfied",
};
- self.tcx.sess.span_err(
- trace.origin.span(),
- &format!("{}: {} ({})",
+ span_err!(self.tcx.sess, trace.origin.span(), E0308,
+ "{}: {} ({})",
message_root_str,
expected_found_str,
- ty::type_err_to_str(self.tcx, terr))[]);
+ ty::type_err_to_str(self.tcx, terr));
match trace.origin {
infer::MatchExpressionArm(_, arm_span) =>
match sub {
ty::ReFree(ty::FreeRegion {bound_region: ty::BrNamed(..), ..}) => {
// Does the required lifetime have a nice name we can print?
- self.tcx.sess.span_err(
- origin.span(),
- &format!("{} may not live long enough", labeled_user_string)[]);
+ span_err!(self.tcx.sess, origin.span(), E0309,
+ "{} may not live long enough", labeled_user_string);
self.tcx.sess.span_help(
origin.span(),
&format!(
ty::ReStatic => {
// Does the required lifetime have a nice name we can print?
- self.tcx.sess.span_err(
- origin.span(),
- &format!("{} may not live long enough", labeled_user_string)[]);
+ span_err!(self.tcx.sess, origin.span(), E0310,
+ "{} may not live long enough", labeled_user_string);
self.tcx.sess.span_help(
origin.span(),
&format!(
_ => {
// If not, be less specific.
- self.tcx.sess.span_err(
- origin.span(),
- &format!(
+ span_err!(self.tcx.sess, origin.span(), E0311,
"{} may not live long enough",
- labeled_user_string)[]);
+ labeled_user_string);
self.tcx.sess.span_help(
origin.span(),
&format!(
self.report_and_explain_type_error(trace, &terr);
}
infer::Reborrow(span) => {
- self.tcx.sess.span_err(
- span,
+ span_err!(self.tcx.sess, span, E0312,
                      "lifetime of reference outlives \
lifetime of borrowed content...");
note_and_explain_region(
"");
}
infer::ReborrowUpvar(span, ref upvar_id) => {
- self.tcx.sess.span_err(
- span,
- &format!("lifetime of borrowed pointer outlives \
+ span_err!(self.tcx.sess, span, E0313,
+ "lifetime of borrowed pointer outlives \
lifetime of captured variable `{}`...",
ty::local_var_name_str(self.tcx,
upvar_id.var_id)
.get()
- .to_string())[]);
+ .to_string());
note_and_explain_region(
self.tcx,
"...the borrowed pointer is valid for ",
"");
}
infer::InfStackClosure(span) => {
- self.tcx.sess.span_err(
- span,
+ span_err!(self.tcx.sess, span, E0314,
"closure outlives stack frame");
note_and_explain_region(
self.tcx,
"");
}
infer::InvokeClosure(span) => {
- self.tcx.sess.span_err(
- span,
+ span_err!(self.tcx.sess, span, E0315,
"cannot invoke closure outside of its lifetime");
note_and_explain_region(
self.tcx,
ContravariantLifetimeItem, "contravariant_lifetime", contravariant_lifetime;
InvariantLifetimeItem, "invariant_lifetime", invariant_lifetime;
- NoSendItem, "no_send_bound", no_send_bound;
NoCopyItem, "no_copy_bound", no_copy_bound;
- NoSyncItem, "no_sync_bound", no_sync_bound;
ManagedItem, "managed_bound", managed_bound;
NonZeroItem, "non_zero", non_zero;
},
_ => false
};
- self.ir.tcx.sess.span_err(
- sp, "not all control paths return a value");
+ span_err!(self.ir.tcx.sess, sp, E0269, "not all control paths return a value");
if ends_with_stmt {
let last_stmt = body.stmts.first().unwrap();
let original_span = original_sp(self.ir.tcx.sess.codemap(),
}
ty::FnDiverging
if self.live_on_entry(entry_ln, self.s.clean_exit_var).is_some() => {
- self.ir.tcx.sess.span_err(sp,
+ span_err!(self.ir.tcx.sess, sp, E0270,
"computation may converge in a function marked as diverging");
}
}
}
- sess.span_err(attr.span, "malformed recursion limit attribute, \
+ span_err!(sess, attr.span, E0296, "malformed recursion limit attribute, \
expected #![recursion_limit=\"N\"]");
}
}
use util::common::can_reach;
use std::cell::RefCell;
-// NOTE(stage0) remove import after a snapshot
-#[cfg(stage0)]
-use std::hash::{Hash};
use syntax::codemap::Span;
use syntax::{ast, visit};
use syntax::ast::{Block, Item, FnDecl, NodeId, Arm, Pat, Stmt, Expr, Local};
}
fn unresolved_lifetime_ref(&self, lifetime_ref: &ast::Lifetime) {
- self.sess.span_err(
- lifetime_ref.span,
- &format!("use of undeclared lifetime name `{}`",
- token::get_name(lifetime_ref.name))[]);
+ span_err!(self.sess, lifetime_ref.span, E0261,
+ "use of undeclared lifetime name `{}`",
+ token::get_name(lifetime_ref.name));
}
fn check_lifetime_defs(&mut self, old_scope: Scope, lifetimes: &Vec<ast::LifetimeDef>) {
let special_idents = [special_idents::static_lifetime];
for lifetime in lifetimes.iter() {
if special_idents.iter().any(|&i| i.name == lifetime.lifetime.name) {
- self.sess.span_err(
- lifetime.lifetime.span,
- &format!("illegal lifetime parameter name: `{}`",
- token::get_name(lifetime.lifetime.name))
- []);
+ span_err!(self.sess, lifetime.lifetime.span, E0262,
+ "illegal lifetime parameter name: `{}`",
+ token::get_name(lifetime.lifetime.name));
}
}
let lifetime_j = &lifetimes[j];
if lifetime_i.lifetime.name == lifetime_j.lifetime.name {
- self.sess.span_err(
- lifetime_j.lifetime.span,
- &format!("lifetime name `{}` declared twice in \
+ span_err!(self.sess, lifetime_j.lifetime.span, E0263,
+ "lifetime name `{}` declared twice in \
the same scope",
- token::get_name(lifetime_j.lifetime.name))
- []);
+ token::get_name(lifetime_j.lifetime.name));
}
}
format!("lifetime name `{}` shadows another \
lifetime name that is already in scope",
token::get_name(lifetime.name)).as_slice());
- self.sess.span_help(
+ self.sess.span_note(
lifetime_def.span,
format!("shadowed lifetime `{}` declared here",
token::get_name(lifetime.name)).as_slice());
- self.sess.span_help(
+ self.sess.span_note(
lifetime.span,
"shadowed lifetimes are deprecated \
and will become a hard error before 1.0");
let predicate =
infcx.resolve_type_vars_if_possible(&obligation.predicate);
if !predicate.references_error() {
- infcx.tcx.sess.span_err(
- obligation.cause.span,
- format!(
+ span_err!(infcx.tcx.sess, obligation.cause.span, E0271,
"type mismatch resolving `{}`: {}",
predicate.user_string(infcx.tcx),
- ty::type_err_to_str(infcx.tcx, &error.err)).as_slice());
+ ty::type_err_to_str(infcx.tcx, &error.err));
note_obligation_cause(infcx, obligation);
}
}
Position::ArgumentNamed(s) => match generic_map.get(s) {
Some(val) => Some(val.as_slice()),
None => {
- infcx.tcx.sess
- .span_err(err_sp,
- format!("the #[rustc_on_unimplemented] \
+ span_err!(infcx.tcx.sess, err_sp, E0272,
+ "the #[rustc_on_unimplemented] \
attribute on \
trait definition for {} refers to \
non-existent type parameter {}",
- trait_str, s)
- .as_slice());
+ trait_str, s);
errored = true;
None
}
},
_ => {
- infcx.tcx.sess
- .span_err(err_sp,
- format!("the #[rustc_on_unimplemented] \
+ span_err!(infcx.tcx.sess, err_sp, E0273,
+ "the #[rustc_on_unimplemented] \
attribute on \
trait definition for {} must have named \
format arguments, \
eg `#[rustc_on_unimplemented = \
\"foo {{T}}\"]`",
- trait_str).as_slice());
+ trait_str);
errored = true;
None
}
report = Some(err);
}
} else {
- infcx.tcx.sess.span_err(err_sp,
- format!("the #[rustc_on_unimplemented] attribute on \
+ span_err!(infcx.tcx.sess, err_sp, E0274,
+ "the #[rustc_on_unimplemented] attribute on \
trait definition for {} must have a value, \
eg `#[rustc_on_unimplemented = \"foo\"]`",
- trait_str).as_slice());
+ trait_str);
}
break;
}
// We could track the stack here more precisely if we wanted, I imagine.
let predicate =
infcx.resolve_type_vars_if_possible(&obligation.predicate);
- infcx.tcx.sess.span_err(
- obligation.cause.span,
- format!(
+ span_err!(infcx.tcx.sess, obligation.cause.span, E0275,
"overflow evaluating the requirement `{}`",
- predicate.user_string(infcx.tcx)).as_slice());
+ predicate.user_string(infcx.tcx));
suggest_new_overflow_limit(infcx.tcx, obligation.cause.span);
SelectionError::Unimplemented => {
match &obligation.cause.code {
&ObligationCauseCode::CompareImplMethodObligation => {
- infcx.tcx.sess.span_err(
- obligation.cause.span,
- format!(
+ span_err!(infcx.tcx.sess, obligation.cause.span, E0276,
"the requirement `{}` appears on the impl \
method but not on the corresponding trait method",
- obligation.predicate.user_string(infcx.tcx)).as_slice());
+                        obligation.predicate.user_string(infcx.tcx));
}
_ => {
match obligation.predicate {
if !trait_predicate.references_error() {
let trait_ref = trait_predicate.to_poly_trait_ref();
- infcx.tcx.sess.span_err(
- obligation.cause.span,
- format!(
+ span_err!(infcx.tcx.sess, obligation.cause.span, E0277,
"the trait `{}` is not implemented for the type `{}`",
trait_ref.user_string(infcx.tcx),
- trait_ref.self_ty().user_string(infcx.tcx)).as_slice());
+ trait_ref.self_ty().user_string(infcx.tcx));
// Check if it has a custom "#[rustc_on_unimplemented]"
// error message, report with that message if it does
let custom_note = report_on_unimplemented(infcx, &*trait_ref.0,
let predicate = infcx.resolve_type_vars_if_possible(predicate);
let err = infcx.equality_predicate(obligation.cause.span,
&predicate).unwrap_err();
- infcx.tcx.sess.span_err(
- obligation.cause.span,
- format!(
+ span_err!(infcx.tcx.sess, obligation.cause.span, E0278,
"the requirement `{}` is not satisfied (`{}`)",
predicate.user_string(infcx.tcx),
- ty::type_err_to_str(infcx.tcx, &err)).as_slice());
+ ty::type_err_to_str(infcx.tcx, &err));
}
ty::Predicate::RegionOutlives(ref predicate) => {
let predicate = infcx.resolve_type_vars_if_possible(predicate);
let err = infcx.region_outlives_predicate(obligation.cause.span,
&predicate).unwrap_err();
- infcx.tcx.sess.span_err(
- obligation.cause.span,
- format!(
+ span_err!(infcx.tcx.sess, obligation.cause.span, E0279,
"the requirement `{}` is not satisfied (`{}`)",
predicate.user_string(infcx.tcx),
- ty::type_err_to_str(infcx.tcx, &err)).as_slice());
+ ty::type_err_to_str(infcx.tcx, &err));
}
ty::Predicate::Projection(..) | ty::Predicate::TypeOutlives(..) => {
let predicate =
infcx.resolve_type_vars_if_possible(&obligation.predicate);
- infcx.tcx.sess.span_err(
- obligation.cause.span,
- format!(
+ span_err!(infcx.tcx.sess, obligation.cause.span, E0280,
"the requirement `{}` is not satisfied",
- predicate.user_string(infcx.tcx)).as_slice());
+ predicate.user_string(infcx.tcx));
}
}
}
let expected_trait_ref = infcx.resolve_type_vars_if_possible(&*expected_trait_ref);
let actual_trait_ref = infcx.resolve_type_vars_if_possible(&*actual_trait_ref);
if !ty::type_is_error(actual_trait_ref.self_ty()) {
- infcx.tcx.sess.span_err(
- obligation.cause.span,
- format!(
+ span_err!(infcx.tcx.sess, obligation.cause.span, E0281,
"type mismatch: the type `{}` implements the trait `{}`, \
but the trait `{}` is required ({})",
expected_trait_ref.self_ty().user_string(infcx.tcx),
expected_trait_ref.user_string(infcx.tcx),
actual_trait_ref.user_string(infcx.tcx),
- ty::type_err_to_str(infcx.tcx, e)).as_slice());
+ ty::type_err_to_str(infcx.tcx, e));
note_obligation_cause(infcx, obligation);
}
}
infcx.tcx.lang_items.sized_trait()
.map_or(false, |sized_id| sized_id == trait_ref.def_id())
{
- infcx.tcx.sess.span_err(
- obligation.cause.span,
- format!(
+ span_err!(infcx.tcx.sess, obligation.cause.span, E0282,
"unable to infer enough type information about `{}`; \
type annotations required",
- self_ty.user_string(infcx.tcx)).as_slice());
+ self_ty.user_string(infcx.tcx));
} else {
- infcx.tcx.sess.span_err(
- obligation.cause.span,
- format!(
+ span_err!(infcx.tcx.sess, obligation.cause.span, E0283,
"type annotations required: cannot resolve `{}`",
- predicate.user_string(infcx.tcx)).as_slice());
+                        predicate.user_string(infcx.tcx));
note_obligation_cause(infcx, obligation);
}
}
_ => {
if !infcx.tcx.sess.has_errors() {
- infcx.tcx.sess.span_err(
- obligation.cause.span,
- format!(
+ span_err!(infcx.tcx.sess, obligation.cause.span, E0284,
"type annotations required: cannot resolve `{}`",
- predicate.user_string(infcx.tcx)).as_slice());
+                        predicate.user_string(infcx.tcx));
note_obligation_cause(infcx, obligation);
}
}
false
}
Err(Overflow) => {
- infcx.tcx.sess.span_err(
- span,
- format!("overflow evaluating whether `{}` is `{}`",
- ty.user_string(infcx.tcx),
- bound.user_string(infcx.tcx)).as_slice());
+ span_err!(infcx.tcx.sess, span, E0285,
+ "overflow evaluating whether `{}` is `{}`",
+ ty.user_string(infcx.tcx),
+ bound.user_string(infcx.tcx));
suggest_new_overflow_limit(infcx.tcx, span);
false
}
let tcx = this.tcx();
match bound {
ty::BoundSend => {
- if
- Some(def_id) == tcx.lang_items.no_send_bound() ||
- Some(def_id) == tcx.lang_items.managed_bound()
- {
+ if Some(def_id) == tcx.lang_items.managed_bound() {
return Err(Unimplemented)
}
}
ty::BoundSync => {
if
- Some(def_id) == tcx.lang_items.no_sync_bound() ||
Some(def_id) == tcx.lang_items.managed_bound() ||
Some(def_id) == tcx.lang_items.unsafe_type()
{
pub use self::InferRegion::*;
pub use self::ImplOrTraitItemId::*;
pub use self::UnboxedClosureKind::*;
-pub use self::TraitStore::*;
pub use self::ast_ty_to_ty_cache_entry::*;
pub use self::Variance::*;
pub use self::AutoAdjustment::*;
use middle::ty_fold::{self, TypeFoldable, TypeFolder};
use middle::ty_walk::TypeWalker;
use util::ppaux::{note_and_explain_region, bound_region_ptr_to_string};
-use util::ppaux::{trait_store_to_string, ty_to_string};
+use util::ppaux::ty_to_string;
use util::ppaux::{Repr, UserString};
use util::common::{memoized, ErrorReported};
use util::nodemap::{NodeMap, NodeSet, DefIdMap, DefIdSet};
use arena::TypedArena;
use std::borrow::{BorrowFrom, Cow};
use std::cell::{Cell, RefCell};
-use std::cmp::{self, Ordering};
+use std::cmp;
use std::fmt::{self, Show};
use std::hash::{Hash, Writer, SipHasher, Hasher};
use std::mem;
pub mutbl: ast::Mutability,
}
-#[derive(Clone, Copy, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, Show)]
-pub enum TraitStore {
- /// Box<Trait>
- UniqTraitStore,
- /// &Trait and &mut Trait
- RegionTraitStore(Region, ast::Mutability),
-}
-
#[derive(Clone, Copy, Show)]
pub struct field_ty {
pub name: Name,
#[derive(Clone, PartialEq, Eq, Hash, Show)]
pub struct ClosureTy<'tcx> {
pub unsafety: ast::Unsafety,
- pub onceness: ast::Onceness,
- pub store: TraitStore,
- pub bounds: ExistentialBounds<'tcx>,
- pub sig: PolyFnSig<'tcx>,
pub abi: abi::Abi,
+ pub sig: PolyFnSig<'tcx>,
}
#[derive(Clone, Copy, PartialEq, Eq, Hash, Show)]
terr_onceness_mismatch(expected_found<Onceness>),
terr_abi_mismatch(expected_found<abi::Abi>),
terr_mutability,
- terr_sigil_mismatch(expected_found<TraitStore>),
terr_box_mutability,
terr_ptr_mutability,
terr_ref_mutability,
terr_regions_no_overlap(Region, Region),
terr_regions_insufficiently_polymorphic(BoundRegion, Region),
terr_regions_overly_polymorphic(BoundRegion, Region),
- terr_trait_stores_differ(terr_vstore_kind, expected_found<TraitStore>),
terr_sorts(expected_found<Ty<'tcx>>),
terr_integer_as_char,
terr_int_mismatch(expected_found<IntVarValue>),
ty_fn_sig(fty).inputs()
}
-pub fn ty_closure_store(fty: Ty) -> TraitStore {
- match fty.sty {
- ty_unboxed_closure(..) => {
- // Close enough for the purposes of all the callers of this
- // function (which is soon to be deprecated anyhow).
- UniqTraitStore
- }
- ref s => {
- panic!("ty_closure_store() called on non-closure type: {:?}", s)
- }
- }
-}
-
pub fn ty_fn_ret<'tcx>(fty: Ty<'tcx>) -> Binder<FnOutput<'tcx>> {
match fty.sty {
ty_bare_fn(_, ref f) => f.sig.output(),
/// afterwards to present additional details, particularly when it comes to lifetime-related
/// errors.
pub fn type_err_to_str<'tcx>(cx: &ctxt<'tcx>, err: &type_err<'tcx>) -> String {
- fn tstore_to_closure(s: &TraitStore) -> String {
- match s {
- &UniqTraitStore => "proc".to_string(),
- &RegionTraitStore(..) => "closure".to_string()
- }
- }
-
match *err {
terr_cyclic_ty => "cyclic type of infinite size".to_string(),
terr_mismatch => "types differ".to_string(),
values.expected,
values.found)
}
- terr_sigil_mismatch(values) => {
- format!("expected {}, found {}",
- tstore_to_closure(&values.expected),
- tstore_to_closure(&values.found))
- }
terr_mutability => "values differ in mutability".to_string(),
terr_box_mutability => {
"boxed values differ in mutability".to_string()
found bound lifetime parameter {}",
bound_region_ptr_to_string(cx, br))
}
- terr_trait_stores_differ(_, ref values) => {
- format!("trait storage differs: expected `{}`, found `{}`",
- trait_store_to_string(cx, (*values).expected),
- trait_store_to_string(cx, (*values).found))
- }
terr_sorts(values) => {
// A naive approach to making sure that we're not reporting silly errors such as:
// (expected closure, found closure).
cx.sess.bug("couldn't find associated type parameter index")
}
-#[derive(Copy, PartialEq, Eq)]
-pub struct AssociatedTypeInfo {
- pub def_id: ast::DefId,
- pub index: uint,
- pub name: ast::Name,
-}
-
-impl PartialOrd for AssociatedTypeInfo {
- fn partial_cmp(&self, other: &AssociatedTypeInfo) -> Option<Ordering> {
- Some(self.index.cmp(&other.index))
- }
-}
-
-impl Ord for AssociatedTypeInfo {
- fn cmp(&self, other: &AssociatedTypeInfo) -> Ordering {
- self.index.cmp(&other.index)
- }
-}
-
pub fn trait_item_def_ids(cx: &ctxt, id: ast::DefId)
-> Rc<Vec<ImplOrTraitItemId>> {
lookup_locally_or_in_crate_store("trait_item_def_ids",
discriminant = val as Disr
}
Ok(_) => {
- cx.sess
- .span_err(e.span,
+ span_err!(cx.sess, e.span, E0304,
"expected signed integer constant");
}
Err(ref err) => {
- cx.sess
- .span_err(e.span,
- &format!("expected constant: {}",
- *err)[]);
+ span_err!(cx.sess, e.span, E0305,
+ "expected constant: {}",
+ *err);
}
},
None => {}
const_eval::const_binary(_) =>
"binary array"
};
- tcx.sess.span_err(count_expr.span, &format!(
+ span_err!(tcx.sess, count_expr.span, E0306,
"expected positive integer for repeat count, found {}",
- found)[]);
+ found);
}
Err(_) => {
let found = match count_expr.node {
_ =>
"non-constant expression"
};
- tcx.sess.span_err(count_expr.span, &format!(
+ span_err!(tcx.sess, count_expr.span, E0307,
"expected constant integer for repeat count, found {}",
- found)[]);
+ found);
}
}
0
impl<'tcx> Repr<'tcx> for ClosureTy<'tcx> {
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
- format!("ClosureTy({},{},{:?},{},{},{})",
+ format!("ClosureTy({},{},{})",
self.unsafety,
- self.onceness,
- self.store,
- self.bounds.repr(tcx),
self.sig.repr(tcx),
self.abi)
}
self.free_substs.repr(tcx),
self.implicit_region_bound.repr(tcx),
self.caller_bounds.repr(tcx))
- }
}
+}
r
}
- fn fold_trait_store(&mut self, s: ty::TraitStore) -> ty::TraitStore {
- super_fold_trait_store(self, s)
- }
-
fn fold_existential_bounds(&mut self, s: &ty::ExistentialBounds<'tcx>)
-> ty::ExistentialBounds<'tcx> {
super_fold_existential_bounds(self, s)
}
}
-impl<'tcx> TypeFoldable<'tcx> for ty::TraitStore {
- fn fold_with<F: TypeFolder<'tcx>>(&self, folder: &mut F) -> ty::TraitStore {
- folder.fold_trait_store(*self)
- }
-}
-
impl<'tcx> TypeFoldable<'tcx> for Ty<'tcx> {
fn fold_with<F: TypeFolder<'tcx>>(&self, folder: &mut F) -> Ty<'tcx> {
folder.fold_ty(*self)
-> ty::ClosureTy<'tcx>
{
ty::ClosureTy {
- store: fty.store.fold_with(this),
sig: fty.sig.fold_with(this),
unsafety: fty.unsafety,
- onceness: fty.onceness,
- bounds: fty.bounds.fold_with(this),
abi: fty.abi,
}
}
mutbl: mt.mutbl}
}
-pub fn super_fold_trait_store<'tcx, T: TypeFolder<'tcx>>(this: &mut T,
- trait_store: ty::TraitStore)
- -> ty::TraitStore {
- match trait_store {
- ty::UniqTraitStore => ty::UniqTraitStore,
- ty::RegionTraitStore(r, m) => {
- ty::RegionTraitStore(r.fold_with(this), m)
- }
- }
-}
-
pub fn super_fold_existential_bounds<'tcx, T: TypeFolder<'tcx>>(
this: &mut T,
bounds: &ty::ExistentialBounds<'tcx>)
self.items.missing.push(lang_items::$item);
}
} else)* {
- self.sess.span_err(span,
- format!("unknown external lang item: `{}`",
- name).as_slice());
+ span_err!(self.sess, span, E0264,
+ "unknown external lang item: `{}`",
+ name);
}
}
}
pub fn span_fatal(&self, sp: Span, msg: &str) -> ! {
self.diagnostic().span_fatal(sp, msg)
}
+    /// Like `span_fatal`, but also attaches an error code (e.g. `"E0312"`)
+    /// to the diagnostic. Delegates to the underlying diagnostic handler;
+    /// never returns (`-> !`).
+    pub fn span_fatal_with_code(&self, sp: Span, msg: &str, code: &str) -> ! {
+        self.diagnostic().span_fatal_with_code(sp, msg, code)
+    }
pub fn fatal(&self, msg: &str) -> ! {
self.diagnostic().handler().fatal(msg)
}
pub fn span_err(&self, sp: Span, msg: &str) {
- // Conditions for enabling multi-line errors:
- if !msg.contains("mismatched types") &&
- !msg.contains("type mismatch resolving") &&
- !msg.contains("if and else have incompatible types") &&
- !msg.contains("if may be missing an else clause") &&
- !msg.contains("match arms have incompatible types") &&
- !msg.contains("structure constructor specifies a structure of type") {
- return self.diagnostic().span_err(sp, msg);
+ match split_msg_into_multilines(msg) {
+ Some(msg) => self.diagnostic().span_err(sp, &msg[]),
+ None => self.diagnostic().span_err(sp, msg)
}
-
- let first = Regex::new(r"[( ]expected").unwrap();
- let second = Regex::new(r" found").unwrap();
- let third = Regex::new(
- r"\((values differ|lifetime|cyclic type of infinite size)").unwrap();
-
- let mut new_msg = String::new();
- let mut head = 0u;
-
- // Insert `\n` before expected and found.
- for (pos1, pos2) in first.find_iter(msg).zip(
- second.find_iter(msg)) {
- new_msg = new_msg +
- // A `(` may be preceded by a space and it should be trimmed
- msg[head..pos1.0].trim_right() + // prefix
- "\n" + // insert before first
- &msg[pos1.0..pos1.1] + // insert what first matched
- &msg[pos1.1..pos2.0] + // between matches
- "\n " + // insert before second
- // 123
- // `expected` is 3 char longer than `found`. To align the types, `found` gets
- // 3 spaces prepended.
- &msg[pos2.0..pos2.1]; // insert what second matched
-
- head = pos2.1;
- }
-
- let mut tail = &msg[head..];
- // Insert `\n` before any remaining messages which match.
- for pos in third.find_iter(tail).take(1) {
- // The end of the message may just be wrapped in `()` without `expected`/`found`.
- // Push this also to a new line and add the final tail after.
- new_msg = new_msg +
- // `(` is usually preceded by a space and should be trimmed.
- tail[..pos.0].trim_right() + // prefix
- "\n" + // insert before paren
- &tail[pos.0..]; // append the tail
-
- tail = "";
- }
-
- new_msg.push_str(tail);
- self.diagnostic().span_err(sp, &new_msg[])
}
pub fn span_err_with_code(&self, sp: Span, msg: &str, code: &str) {
- self.diagnostic().span_err_with_code(sp, msg, code)
+ match split_msg_into_multilines(msg) {
+ Some(msg) => self.diagnostic().span_err_with_code(sp, &msg[], code),
+ None => self.diagnostic().span_err_with_code(sp, msg, code)
+ }
}
pub fn err(&self, msg: &str) {
self.diagnostic().handler().err(msg)
}
}
+/// Reformats certain long type-error messages onto multiple lines so the
+/// `expected .../found ...` halves line up vertically under each other.
+///
+/// Returns `Some(reformatted)` only for the specific diagnostics listed in
+/// the guard below; returns `None` when the message should be emitted
+/// unchanged. Callers (`span_err`, `span_err_with_code`) fall back to the
+/// original `msg` on `None`.
+fn split_msg_into_multilines(msg: &str) -> Option<String> {
+    // Conditions for enabling multi-line errors:
+    if !msg.contains("mismatched types") &&
+        !msg.contains("type mismatch resolving") &&
+        !msg.contains("if and else have incompatible types") &&
+        !msg.contains("if may be missing an else clause") &&
+        !msg.contains("match arms have incompatible types") &&
+        !msg.contains("structure constructor specifies a structure of type") {
+        return None
+    }
+
+    // NOTE(review): these patterns track the exact wording produced by
+    // type_err_to_str; they must be kept in sync with it.
+    let first = Regex::new(r"[( ]expected").unwrap();
+    let second = Regex::new(r" found").unwrap();
+    let third = Regex::new(
+        r"\((values differ|lifetime|cyclic type of infinite size)").unwrap();
+
+    let mut new_msg = String::new();
+    let mut head = 0u;
+
+    // Insert `\n` before expected and found.
+    // `find_iter` yields (start, end) byte offsets into `msg`; `head` tracks
+    // how far we have already copied.
+    for (pos1, pos2) in first.find_iter(msg).zip(
+        second.find_iter(msg)) {
+        new_msg = new_msg +
+            // A `(` may be preceded by a space and it should be trimmed
+            msg[head..pos1.0].trim_right() + // prefix
+            "\n" + // insert before first
+            &msg[pos1.0..pos1.1] + // insert what first matched
+            &msg[pos1.1..pos2.0] + // between matches
+            "\n " + // insert before second
+            // 123
+            // `expected` is 3 char longer than `found`. To align the types, `found` gets
+            // 3 spaces prepended.
+            &msg[pos2.0..pos2.1]; // insert what second matched
+
+        head = pos2.1;
+    }
+
+    let mut tail = &msg[head..];
+    // Insert `\n` before any remaining messages which match.
+    for pos in third.find_iter(tail).take(1) {
+        // The end of the message may just be wrapped in `()` without `expected`/`found`.
+        // Push this also to a new line and add the final tail after.
+        new_msg = new_msg +
+            // `(` is usually preceded by a space and should be trimmed.
+            tail[..pos.0].trim_right() + // prefix
+            "\n" + // insert before paren
+            &tail[pos.0..]; // append the tail
+
+        tail = "";
+    }
+
+    new_msg.push_str(tail);
+
+    return Some(new_msg)
+}
+
pub fn build_session(sopts: config::Options,
local_crate_source_file: Option<Path>,
registry: diagnostics::registry::Registry)
fn explain_span(cx: &ctxt, heading: &str, span: Span)
-> (String, Option<Span>) {
let lo = cx.sess.codemap().lookup_char_pos_adj(span.lo);
- (format!("the {} at {}:{}", heading, lo.line, lo.col.to_uint()),
+ (format!("the {} at {}:{}", heading, lo.line, lo.col.to_usize()),
Some(span))
}
}
ty_to_string(cx, m.ty))
}
-pub fn trait_store_to_string(cx: &ctxt, s: ty::TraitStore) -> String {
- match s {
- ty::UniqTraitStore => "Box ".to_string(),
- ty::RegionTraitStore(r, m) => {
- format!("{}{}", region_ptr_to_string(cx, r), mutability_to_string(m))
- }
- }
-}
-
pub fn vec_map_to_string<T, F>(ts: &[T], f: F) -> String where
F: FnMut(&T) -> String,
{
_ => { }
}
- push_sig_to_string(cx, &mut s, '(', ')', sig, "");
+ push_sig_to_string(cx, &mut s, '(', ')', sig);
match opt_def_id {
Some(def_id) => {
fn closure_to_string<'tcx>(cx: &ctxt<'tcx>, cty: &ty::ClosureTy<'tcx>) -> String {
let mut s = String::new();
- match cty.store {
- ty::UniqTraitStore => {}
- ty::RegionTraitStore(region, _) => {
-        s.push_str(&region_to_string(cx, "", true, region)[]);
- }
- }
-
match cty.unsafety {
ast::Unsafety::Normal => {}
ast::Unsafety::Unsafe => {
}
};
- let bounds_str = cty.bounds.user_string(cx);
-
- match cty.store {
- ty::UniqTraitStore => {
- assert_eq!(cty.onceness, ast::Once);
- s.push_str("proc");
- push_sig_to_string(cx, &mut s, '(', ')', &cty.sig,
- &bounds_str[]);
- }
- ty::RegionTraitStore(..) => {
- match cty.onceness {
- ast::Many => {}
- ast::Once => s.push_str("once ")
- }
- push_sig_to_string(cx, &mut s, '|', '|', &cty.sig,
- &bounds_str[]);
- }
- }
+ push_sig_to_string(cx, &mut s, '|', '|', &cty.sig);
s
}
s: &mut String,
bra: char,
ket: char,
- sig: &ty::PolyFnSig<'tcx>,
- bounds: &str) {
+ sig: &ty::PolyFnSig<'tcx>) {
s.push(bra);
let strs = sig.0.inputs
.iter()
}
s.push(ket);
- if !bounds.is_empty() {
- s.push_str(":");
- s.push_str(bounds);
- }
-
match sig.0.output {
ty::FnConverging(t) => {
if !ty::type_is_nil(t) {
0
};
- for t in tps[..(tps.len() - num_defaults)].iter() {
+ for t in tps[..tps.len() - num_defaults].iter() {
strs.push(ty_to_string(cx, *t))
}
format!("{}({}){}",
base,
if strs[0].starts_with("(") && strs[0].ends_with(",)") {
- &strs[0][1 .. (strs[0].len() - 2)] // Remove '(' and ',)'
+ &strs[0][1 .. strs[0].len() - 2] // Remove '(' and ',)'
} else if strs[0].starts_with("(") && strs[0].ends_with(")") {
- &strs[0][1 .. (strs[0].len() - 1)] // Remove '(' and ')'
+ &strs[0][1 .. strs[0].len() - 1] // Remove '(' and ')'
} else {
&strs[0][]
},
}
}
-impl<'tcx> Repr<'tcx> for ty::TraitStore {
- fn repr(&self, tcx: &ctxt) -> String {
- trait_store_to_string(tcx, *self)
- }
-}
-
impl<'tcx> Repr<'tcx> for ty::BuiltinBound {
fn repr(&self, _tcx: &ctxt) -> String {
format!("{:?}", *self)
// While we have at least a full buffer size chunk's worth of data, process that data
// without copying it into the buffer
while input.len() - i >= size {
- func(&input[i..(i + size)]);
+ func(&input[i..i + size]);
i += size;
}
StrictVersionHashVisitor { st: st }
}
- // To off-load the bulk of the hash-computation on deriving(Hash),
+ // To off-load the bulk of the hash-computation on #[derive(Hash)],
// we define a set of enums corresponding to the content that our
// crate visitor will encounter as it traverses the ast.
//
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use target::Target;
+
+/// Target specification for `aarch64-linux-android` (64-bit ARM Android),
+/// built on top of the generic Linux options.
+pub fn target() -> Target {
+    let mut base = super::linux_base::opts();
+    // NOTE(review): tolerating duplicate symbol definitions at link time —
+    // presumably to work around symbols provided by both the Android NDK
+    // libraries and the compiler runtime; TODO confirm the exact conflict.
+    base.pre_link_args.push("-Wl,--allow-multiple-definition".to_string());
+    // NOTE(review): recent Android releases are understood to require
+    // position-independent executables — confirm against the NDK docs.
+    base.position_independent_executables = true;
+    Target {
+        data_layout: "e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-\
+                      f32:32:32-f64:64:64-v64:64:64-v128:128:128-a:0:64-\
+                      n32:64-S128".to_string(),
+        llvm_target: "aarch64-linux-android".to_string(),
+        target_endian: "little".to_string(),
+        target_pointer_width: "64".to_string(),
+        arch: "aarch64".to_string(),
+        target_os: "android".to_string(),
+        options: base,
+    }
+}
mod arm_unknown_linux_gnueabi;
mod arm_unknown_linux_gnueabihf;
mod aarch64_apple_ios;
+mod aarch64_linux_android;
mod aarch64_unknown_linux_gnu;
mod i686_apple_darwin;
mod i686_pc_windows_gnu;
i386_apple_ios,
x86_64_apple_ios,
aarch64_apple_ios,
+ aarch64_linux_android,
armv7_apple_ios,
armv7s_apple_ios,
use rustc::lint;
use rustc::metadata;
use rustc::metadata::creader::CrateOrString::Str;
-use rustc::DIAGNOSTICS;
use rustc::util::common::time;
use std::cmp::Ordering::Equal;
None => return
};
- let descriptions = diagnostics::registry::Registry::new(&DIAGNOSTICS);
+ let descriptions = diagnostics_registry();
match matches.opt_str("explain") {
Some(ref code) => {
match descriptions.find_description(&code[]) {
}
}
+/// Builds the single `Registry` of extended error-code descriptions used by
+/// `--explain`, by concatenating the `DIAGNOSTICS` arrays registered by each
+/// compiler crate (rustc, rustc_typeck, rustc_resolve).
+pub fn diagnostics_registry() -> diagnostics::registry::Registry {
+    use syntax::diagnostics::registry::Registry;
+
+    let all_errors = Vec::new() +
+        rustc::diagnostics::DIAGNOSTICS.as_slice() +
+        rustc_typeck::diagnostics::DIAGNOSTICS.as_slice() +
+        rustc_resolve::diagnostics::DIAGNOSTICS.as_slice();
+
+    Registry::new(&*all_errors)
+}
+
pub fn main() {
let args = std::os::args();
let result = run(args);
std::os::set_exit_status(result);
}
+
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![allow(non_snake_case)]
+
+register_diagnostics! {
+ E0157,
+ E0153,
+ E0251, // a named type or value has already been imported in this module
+ E0252, // a named type or value has already been imported in this module
+ E0253, // not directly importable
+ E0254, // import conflicts with imported crate in this module
+ E0255, // import conflicts with value in this module
+ E0256, // import conflicts with type in this module
+    E0257, // inherent implementations are only allowed on types defined in the current module
+ E0258, // import conflicts with existing submodule
+ E0259, // an extern crate has already been imported into this module
+ E0260 // name conflicts with an external crate that has been imported into this module
+}
+
+__build_diagnostic_array! { DIAGNOSTICS }
use std::rc::{Rc, Weak};
use std::uint;
+// NB: This module needs to be declared first so diagnostics are
+// registered before they are used.
+pub mod diagnostics;
+
mod check_unused;
mod record_exports;
mod build_reduced_graph;
in this module",
namespace_name,
token::get_name(name).get());
- self.session.span_err(import_directive.span, msg.as_slice());
+ span_err!(self.session, import_directive.span, E0251, "{}", msg.as_slice());
} else {
let target = Target::new(containing_module.clone(),
name_bindings.clone(),
ValueNS => "value",
},
token::get_name(name).get());
- self.session.span_err(import_span, &msg[]);
+ span_err!(self.session, import_span, E0252, "{}", &msg[]);
}
Some(_) | None => {}
}
if !name_bindings.defined_in_namespace_with(namespace, IMPORTABLE) {
let msg = format!("`{}` is not directly importable",
token::get_name(name));
- self.session.span_err(import_span, &msg[]);
+ span_err!(self.session, import_span, E0253, "{}", &msg[]);
}
}
crate in this module \
(maybe you meant `use {0}::*`?)",
token::get_name(name).get());
- self.session.span_err(import_span, &msg[]);
+ span_err!(self.session, import_span, E0254, "{}", &msg[]);
}
Some(_) | None => {}
}
let msg = format!("import `{}` conflicts with value \
in this module",
token::get_name(name).get());
- self.session.span_err(import_span, &msg[]);
+ span_err!(self.session, import_span, E0255, "{}", &msg[]);
if let Some(span) = value.value_span {
self.session.span_note(span,
"conflicting value here");
let msg = format!("import `{}` conflicts with type in \
this module",
token::get_name(name).get());
- self.session.span_err(import_span, &msg[]);
+ span_err!(self.session, import_span, E0256, "{}", &msg[]);
if let Some(span) = ty.type_span {
self.session.span_note(span,
"note conflicting type here")
let msg = format!("inherent implementations \
are only allowed on types \
defined in the current module");
- self.session.span_err(span, &msg[]);
+ span_err!(self.session, span, E0257, "{}", &msg[]);
self.session.span_note(import_span,
"import from other module here")
}
let msg = format!("import `{}` conflicts with existing \
submodule",
token::get_name(name).get());
- self.session.span_err(import_span, &msg[]);
+ span_err!(self.session, import_span, E0258, "{}", &msg[]);
if let Some(span) = ty.type_span {
self.session.span_note(span,
"note conflicting module here")
}
if module.external_module_children.borrow().contains_key(&name) {
- self.session
- .span_err(span,
- &format!("an external crate named `{}` has already \
+ span_err!(self.session, span, E0259,
+ "an external crate named `{}` has already \
been imported into this module",
- token::get_name(name).get())[]);
+ token::get_name(name).get());
}
}
}
if module.external_module_children.borrow().contains_key(&name) {
- self.session
- .span_err(span,
- &format!("the name `{}` conflicts with an external \
+ span_err!(self.session, span, E0260,
+ "the name `{}` conflicts with an external \
crate that has been imported into this \
module",
- token::get_name(name).get())[]);
+ token::get_name(name).get());
}
}
let module_name = self.module_to_string(&*search_module);
let mut span = span;
let msg = if "???" == &module_name[] {
- span.hi = span.lo + Pos::from_uint(segment_name.get().len());
+ span.hi = span.lo + Pos::from_usize(segment_name.get().len());
match search_parent_externals(name,
&self.current_module) {
let msg = format!("Could not find `{}` in `{}`",
// idx +- 1 to account for the
// colons on either side
- &mpath[(idx + 1)..],
- &mpath[..(idx - 1)]);
+ &mpath[idx + 1..],
+ &mpath[..idx - 1]);
return Failed(Some((span, msg)));
},
None => {
for (i, rib) in ribs.iter().enumerate().rev() {
match rib.bindings.get(&name).cloned() {
Some(def_like) => {
- return self.upvarify(&ribs[(i + 1)..], def_like, span);
+ return self.upvarify(&ribs[i + 1..], def_like, span);
}
None => {
// Continue.
// against the archive.
if sess.lto() {
let name = cratepath.filename_str().unwrap();
- let name = &name[3..(name.len() - 5)]; // chop off lib/.rlib
+ let name = &name[3..name.len() - 5]; // chop off lib/.rlib
time(sess.time_passes(),
&format!("altering {}.rlib", name)[],
(), |()| {
let archive = ArchiveRO::open(&path).expect("wanted an rlib");
let file = path.filename_str().unwrap();
- let file = &file[3..(file.len() - 5)]; // chop off lib/.rlib
+ let file = &file[3..file.len() - 5]; // chop off lib/.rlib
debug!("reading {}", file);
for i in iter::count(0u, 1) {
let bc_encoded = time(sess.time_passes(),
}
fn read_from_le_bytes<T: Int>(bytes: &[u8], position_in_bytes: uint) -> T {
- let byte_data = &bytes[position_in_bytes..(position_in_bytes + mem::size_of::<T>())];
+ let byte_data = &bytes[position_in_bytes..position_in_bytes + mem::size_of::<T>()];
let data = unsafe {
*(byte_data.as_ptr() as *const T)
};
output: &Path,
file_type: llvm::FileType) {
unsafe {
- let output = CString::from_slice(output.as_vec());
+ let output_c = CString::from_slice(output.as_vec());
let result = llvm::LLVMRustWriteOutputFile(
- target, pm, m, output.as_ptr(), file_type);
+ target, pm, m, output_c.as_ptr(), file_type);
if !result {
- llvm_err(handler, "could not write output".to_string());
+ llvm_err(handler, format!("could not write output to {}", output.display()));
}
}
}
if len <= 2 {
return;
}
- let sub_paths = &sub_paths[..(len-2)];
+ let sub_paths = &sub_paths[..len-2];
for &(ref span, ref qualname) in sub_paths.iter() {
self.fmt.sub_mod_ref_str(path.span,
*span,
format!("file_name,{},file_line,{},file_col,{},extent_start,{},extent_start_bytes,{},\
file_line_end,{},file_col_end,{},extent_end,{},extent_end_bytes,{}",
lo_loc.file.name,
- lo_loc.line, lo_loc.col.to_uint(), lo_pos.to_uint(), lo_pos_byte.to_uint(),
- hi_loc.line, hi_loc.col.to_uint(), hi_pos.to_uint(), hi_pos_byte.to_uint())
+ lo_loc.line, lo_loc.col.to_usize(), lo_pos.to_usize(), lo_pos_byte.to_usize(),
+ hi_loc.line, hi_loc.col.to_usize(), hi_pos.to_usize(), hi_pos_byte.to_usize())
}
// sub_span starts at span.lo, so we need to adjust the positions etc.
enter_match(bcx, dm, m, col, val, |pats| {
if pat_is_binding_or_wild(dm, &*pats[col]) {
let mut r = pats[..col].to_vec();
- r.push_all(&pats[(col + 1)..]);
+ r.push_all(&pats[col + 1..]);
Some(r)
} else {
None
let dm = &tcx.def_map;
let mut vals_left = vals[0u..col].to_vec();
- vals_left.push_all(&vals[(col + 1u)..]);
+ vals_left.push_all(&vals[col + 1u..]);
let ccx = bcx.fcx.ccx;
// Find a real id (we're adding placeholder wildcard patterns, but
}
}
-// Returns a pointer to the body for the box. The box may be an opaque
-// box. The result will be casted to the type of body_t, if it is statically
-// known.
-pub fn at_box_body<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
- body_t: Ty<'tcx>, boxptr: ValueRef) -> ValueRef {
- let _icx = push_ctxt("at_box_body");
- let ccx = bcx.ccx();
- let ty = Type::at_box(ccx, type_of(ccx, body_t));
- let boxptr = PointerCast(bcx, boxptr, ty.ptr_to());
- GEPi(bcx, boxptr, &[0u, abi::BOX_FIELD_BODY])
-}
-
fn require_alloc_fn<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
info_ty: Ty<'tcx>, it: LangItem) -> ast::DefId {
match bcx.tcx().lang_items.require(it) {
.map(|arg| node_id_type(bcx, arg.id))
.collect::<Vec<_>>();
let monomorphized_arg_types = match closure_env.kind {
- closure::NotClosure | closure::BoxedClosure(..) => {
+ closure::NotClosure => {
monomorphized_arg_types
}
};
bcx = match closure_env.kind {
- closure::NotClosure | closure::BoxedClosure(..) => {
+ closure::NotClosure => {
copy_args_to_allocas(bcx,
arg_scope,
&decl.inputs[],
// lot more efficient) than doing str::as_c_str("", ...) every time.
pub fn noname() -> *const c_char {
static CNULL: c_char = 0;
- &CNULL as *const c_char
+ &CNULL
}
impl<'a, 'tcx> Builder<'a, 'tcx> {
}
_ => unreachable!(),
};
- let vec_len = llvec_len(&cls[(i + 1u)..]);
+ let vec_len = llvec_len(&cls[i + 1u..]);
let vec_ty = Type::vector(&elt_ty, vec_len as u64 * elts_per_word);
tys.push(vec_ty);
i += vec_len;
pub use self::ClosureKind::*;
-use back::abi;
use back::link::mangle_internal_name_by_path_and_seq;
-use llvm::ValueRef;
use middle::mem_categorization::Typer;
use trans::adt;
use trans::base::*;
use trans::build::*;
use trans::cleanup::{CleanupMethods, ScopeId};
use trans::common::*;
-use trans::datum::{Datum, Lvalue, rvalue_scratch_datum};
+use trans::datum::{Datum, rvalue_scratch_datum};
use trans::datum::{Rvalue, ByValue};
use trans::debuginfo;
use trans::expr;
use trans::monomorphize::{self, MonoId};
use trans::type_of::*;
-use trans::type_::Type;
-use middle::ty::{self, Ty, UnboxedClosureTyper};
+use middle::ty::{self, UnboxedClosureTyper};
use middle::subst::{Substs};
use session::config::FullDebugInfo;
-use util::ppaux::ty_to_string;
use syntax::ast;
use syntax::ast_util;
-// ___Good to know (tm)__________________________________________________
-//
-// The layout of a closure environment in memory is
-// roughly as follows:
-//
-// struct rust_opaque_box { // see rust_internal.h
-// unsigned ref_count; // obsolete (part of @T's header)
-// fn(void*) *drop_glue; // destructor (for proc)
-// rust_opaque_box *prev; // obsolete (part of @T's header)
-// rust_opaque_box *next; // obsolete (part of @T's header)
-// struct closure_data {
-// upvar1_t upvar1;
-// ...
-// upvarN_t upvarN;
-// }
-// };
-//
-// Note that the closure is itself a rust_opaque_box. This is true
-// even for ~fn and ||, because we wish to keep binary compatibility
-// between all kinds of closures. The allocation strategy for this
-// closure depends on the closure type. For a sendfn, the closure
-// (and the referenced type descriptors) will be allocated in the
-// exchange heap. For a fn, the closure is allocated in the task heap
-// and is reference counted. For a block, the closure is allocated on
-// the stack.
-//
-// ## Opaque closures and the embedded type descriptor ##
-//
-// One interesting part of closures is that they encapsulate the data
-// that they close over. So when I have a ptr to a closure, I do not
-// know how many type descriptors it contains nor what upvars are
-// captured within. That means I do not know precisely how big it is
-// nor where its fields are located. This is called an "opaque
-// closure".
-//
-// Typically an opaque closure suffices because we only manipulate it
-// by ptr. The routine Type::at_box().ptr_to() returns an appropriate
-// type for such an opaque closure; it allows access to the box fields,
-// but not the closure_data itself.
-//
-// But sometimes, such as when cloning or freeing a closure, we need
-// to know the full information. That is where the type descriptor
-// that defines the closure comes in handy. We can use its take and
-// drop glue functions to allocate/free data as needed.
-//
-// ## Subtleties concerning alignment ##
-//
-// It is important that we be able to locate the closure data *without
-// knowing the kind of data that is being bound*. This can be tricky
-// because the alignment requirements of the bound data affects the
-// alignment requires of the closure_data struct as a whole. However,
-// right now this is a non-issue in any case, because the size of the
-// rust_opaque_box header is always a multiple of 16-bytes, which is
-// the maximum alignment requirement we ever have to worry about.
-//
-// The only reason alignment matters is that, in order to learn what data
-// is bound, we would normally first load the type descriptors: but their
-// location is ultimately depend on their content! There is, however, a
-// workaround. We can load the tydesc from the rust_opaque_box, which
-// describes the closure_data struct and has self-contained derived type
-// descriptors, and read the alignment from there. It's just annoying to
-// do. Hopefully should this ever become an issue we'll have monomorphized
-// and type descriptors will all be a bad dream.
-//
-// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-#[derive(Copy)]
-pub struct EnvValue<'tcx> {
- action: ast::CaptureClause,
- datum: Datum<'tcx, Lvalue>
-}
-
-impl<'tcx> EnvValue<'tcx> {
- pub fn to_string<'a>(&self, ccx: &CrateContext<'a, 'tcx>) -> String {
- format!("{:?}({})", self.action, self.datum.to_string(ccx))
- }
-}
-
-// Given a closure ty, emits a corresponding tuple ty
-pub fn mk_closure_tys<'tcx>(tcx: &ty::ctxt<'tcx>,
- bound_values: &[EnvValue<'tcx>])
- -> Ty<'tcx> {
- // determine the types of the values in the env. Note that this
- // is the actual types that will be stored in the map, not the
- // logical types as the user sees them, so by-ref upvars must be
- // converted to ptrs.
- let bound_tys = bound_values.iter().map(|bv| {
- match bv.action {
- ast::CaptureByValue => bv.datum.ty,
- ast::CaptureByRef => ty::mk_mut_ptr(tcx, bv.datum.ty)
- }
- }).collect();
- let cdata_ty = ty::mk_tup(tcx, bound_tys);
- debug!("cdata_ty={}", ty_to_string(tcx, cdata_ty));
- return cdata_ty;
-}
-
-fn tuplify_box_ty<'tcx>(tcx: &ty::ctxt<'tcx>, t: Ty<'tcx>) -> Ty<'tcx> {
- let ptr = ty::mk_imm_ptr(tcx, tcx.types.i8);
- ty::mk_tup(tcx, vec!(tcx.types.uint, ty::mk_nil_ptr(tcx), ptr, ptr, t))
-}
-
-pub struct ClosureResult<'blk, 'tcx: 'blk> {
- llbox: ValueRef, // llvalue of ptr to closure
- cdata_ty: Ty<'tcx>, // type of the closure data
- bcx: Block<'blk, 'tcx> // final bcx
-}
-
-// Given a block context and a list of tydescs and values to bind
-// construct a closure out of them. If copying is true, it is a
-// heap allocated closure that copies the upvars into environment.
-// Otherwise, it is stack allocated and copies pointers to the upvars.
-pub fn store_environment<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
- bound_values: Vec<EnvValue<'tcx>>)
- -> ClosureResult<'blk, 'tcx> {
- let _icx = push_ctxt("closure::store_environment");
- let ccx = bcx.ccx();
- let tcx = ccx.tcx();
-
- // compute the type of the closure
- let cdata_ty = mk_closure_tys(tcx, &bound_values[]);
-
- // cbox_ty has the form of a tuple: (a, b, c) we want a ptr to a
- // tuple. This could be a ptr in uniq or a box or on stack,
- // whatever.
- let cbox_ty = tuplify_box_ty(tcx, cdata_ty);
- let cboxptr_ty = ty::mk_ptr(tcx, ty::mt {ty:cbox_ty, mutbl:ast::MutImmutable});
- let llboxptr_ty = type_of(ccx, cboxptr_ty);
-
- // If there are no bound values, no point in allocating anything.
- if bound_values.is_empty() {
- return ClosureResult {llbox: C_null(llboxptr_ty),
- cdata_ty: cdata_ty,
- bcx: bcx};
- }
-
- // allocate closure in the heap
- let llbox = alloc_ty(bcx, cbox_ty, "__closure");
-
- let llbox = PointerCast(bcx, llbox, llboxptr_ty);
- debug!("tuplify_box_ty = {}", ty_to_string(tcx, cbox_ty));
-
- // Copy expr values into boxed bindings.
- let mut bcx = bcx;
- for (i, bv) in bound_values.into_iter().enumerate() {
- debug!("Copy {} into closure", bv.to_string(ccx));
-
- if ccx.sess().asm_comments() {
- add_comment(bcx, &format!("Copy {} into closure",
- bv.to_string(ccx))[]);
- }
-
- let bound_data = GEPi(bcx, llbox, &[0u, abi::BOX_FIELD_BODY, i]);
-
- match bv.action {
- ast::CaptureByValue => {
- bcx = bv.datum.store_to(bcx, bound_data);
- }
- ast::CaptureByRef => {
- Store(bcx, bv.datum.to_llref(), bound_data);
- }
- }
- }
-
- ClosureResult { llbox: llbox, cdata_ty: cdata_ty, bcx: bcx }
-}
-
-// Given a context and a list of upvars, build a closure. This just
-// collects the upvars and packages them up for store_environment.
-fn build_closure<'blk, 'tcx>(bcx0: Block<'blk, 'tcx>,
- freevar_mode: ast::CaptureClause,
- freevars: &Vec<ty::Freevar>)
- -> ClosureResult<'blk, 'tcx> {
- let _icx = push_ctxt("closure::build_closure");
-
- // If we need to, package up the iterator body to call
- let bcx = bcx0;
-
- // Package up the captured upvars
- let mut env_vals = Vec::new();
- for freevar in freevars.iter() {
- let datum = expr::trans_local_var(bcx, freevar.def);
- env_vals.push(EnvValue {action: freevar_mode, datum: datum});
- }
-
- store_environment(bcx, env_vals)
-}
-
-// Given an enclosing block context, a new function context, a closure type,
-// and a list of upvars, generate code to load and populate the environment
-// with the upvars and type descriptors.
-fn load_environment<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
- cdata_ty: Ty<'tcx>,
- freevars: &[ty::Freevar],
- store: ty::TraitStore)
- -> Block<'blk, 'tcx> {
- let _icx = push_ctxt("closure::load_environment");
-
- // Load a pointer to the closure data, skipping over the box header:
- let llcdata = at_box_body(bcx, cdata_ty, bcx.fcx.llenv.unwrap());
-
- // Store the pointer to closure data in an alloca for debug info because that's what the
- // llvm.dbg.declare intrinsic expects
- let env_pointer_alloca = if bcx.sess().opts.debuginfo == FullDebugInfo {
- let alloc = alloc_ty(bcx, ty::mk_mut_ptr(bcx.tcx(), cdata_ty), "__debuginfo_env_ptr");
- Store(bcx, llcdata, alloc);
- Some(alloc)
- } else {
- None
- };
-
- // Populate the upvars from the environment
- let mut i = 0u;
- for freevar in freevars.iter() {
- let mut upvarptr = GEPi(bcx, llcdata, &[0u, i]);
- let captured_by_ref = match store {
- ty::RegionTraitStore(..) => {
- upvarptr = Load(bcx, upvarptr);
- true
- }
- ty::UniqTraitStore => false
- };
- let def_id = freevar.def.def_id();
-
- bcx.fcx.llupvars.borrow_mut().insert(def_id.node, upvarptr);
- if let Some(env_pointer_alloca) = env_pointer_alloca {
- debuginfo::create_captured_var_metadata(
- bcx,
- def_id.node,
- env_pointer_alloca,
- i,
- captured_by_ref,
- freevar.span);
- }
-
- i += 1u;
- }
-
- bcx
-}
fn load_unboxed_closure_environment<'blk, 'tcx>(
bcx: Block<'blk, 'tcx>,
freevar_mode: ast::CaptureClause,
freevars: &[ty::Freevar])
-> Block<'blk, 'tcx> {
- let _icx = push_ctxt("closure::load_environment");
+ let _icx = push_ctxt("closure::load_unboxed_closure_environment");
// Special case for small by-value selfs.
let closure_id = ast_util::local_def(bcx.fcx.id);
bcx
}
-fn fill_fn_pair(bcx: Block, pair: ValueRef, llfn: ValueRef, llenvptr: ValueRef) {
- Store(bcx, llfn, GEPi(bcx, pair, &[0u, abi::FAT_PTR_ADDR]));
- let llenvptr = PointerCast(bcx, llenvptr, Type::i8p(bcx.ccx()));
- Store(bcx, llenvptr, GEPi(bcx, pair, &[0u, abi::FAT_PTR_EXTRA]));
-}
-
#[derive(PartialEq)]
pub enum ClosureKind<'tcx> {
NotClosure,
- // See load_environment.
- BoxedClosure(Ty<'tcx>, ty::TraitStore),
// See load_unboxed_closure_environment.
UnboxedClosure(ast::CaptureClause)
}
match self.kind {
NotClosure => bcx,
- BoxedClosure(cdata_ty, store) => {
- load_environment(bcx, cdata_ty, self.freevars, store)
- }
UnboxedClosure(freevar_mode) => {
load_unboxed_closure_environment(bcx, arg_scope, freevar_mode, self.freevars)
}
}
}
-/// Translates the body of a closure expression.
-///
-/// - `store`
-/// - `decl`
-/// - `body`
-/// - `id`: The id of the closure expression.
-/// - `cap_clause`: information about captured variables, if any.
-/// - `dest`: where to write the closure value, which must be a
-/// (fn ptr, env) pair
-pub fn trans_expr_fn<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
- store: ty::TraitStore,
- decl: &ast::FnDecl,
- body: &ast::Block,
- id: ast::NodeId,
- dest: expr::Dest)
- -> Block<'blk, 'tcx> {
- let _icx = push_ctxt("closure::trans_expr_fn");
-
- let dest_addr = match dest {
- expr::SaveIn(p) => p,
- expr::Ignore => {
- return bcx; // closure construction is non-side-effecting
- }
- };
-
- let ccx = bcx.ccx();
- let tcx = bcx.tcx();
- let fty = node_id_type(bcx, id);
- let s = tcx.map.with_path(id, |path| {
- mangle_internal_name_by_path_and_seq(path, "closure")
- });
- let llfn = decl_internal_rust_fn(ccx, fty, &s[]);
-
- // set an inline hint for all closures
- set_inline_hint(llfn);
-
- let freevar_mode = tcx.capture_mode(id);
- let freevars: Vec<ty::Freevar> =
- ty::with_freevars(tcx, id, |fv| fv.iter().map(|&fv| fv).collect());
-
- let ClosureResult {
- llbox,
- cdata_ty,
- bcx
- } = build_closure(bcx, freevar_mode, &freevars);
-
- trans_closure(ccx,
- decl,
- body,
- llfn,
- bcx.fcx.param_substs,
- id,
- &[],
- ty::erase_late_bound_regions(ccx.tcx(), &ty::ty_fn_ret(fty)),
- ty::ty_fn_abi(fty),
- ClosureEnv::new(&freevars[],
- BoxedClosure(cdata_ty, store)));
- fill_fn_pair(bcx, dest_addr, llfn, llbox);
- bcx
-}
-
/// Returns the LLVM function declaration for an unboxed closure, creating it
/// if necessary. If the ID does not correspond to a closure ID, returns None.
pub fn get_or_create_declaration_if_unboxed_closure<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
/// Generates a unique symbol based off the name given. This is used to create
/// unique symbols for things like closures.
pub fn gensym_name(name: &str) -> PathElem {
- let num = token::gensym(name).uint();
+ let num = token::gensym(name).usize();
// use one colon which will get translated to a period by the mangler, and
// we're guaranteed that `num` is globally unique for this crate.
PathName(token::gensym(&format!("{}:{}", name, num)[]))
!null_terminated as Bool);
let gsym = token::gensym("str");
- let buf = CString::from_vec(format!("str{}", gsym.uint()).into_bytes());
+ let buf = CString::from_vec(format!("str{}", gsym.usize()).into_bytes());
let g = llvm::LLVMAddGlobal(cx.llmod(), val_ty(sc).to_ref(), buf.as_ptr());
llvm::LLVMSetInitializer(g, sc);
llvm::LLVMSetGlobalConstant(g, True);
let lldata = C_bytes(cx, data);
let gsym = token::gensym("binary");
- let name = format!("binary{}", gsym.uint());
+ let name = format!("binary{}", gsym.usize());
let name = CString::from_vec(name.into_bytes());
let g = llvm::LLVMAddGlobal(cx.llmod(), val_ty(lldata).to_ref(),
name.as_ptr());
closure_ty: ty::ClosureTy<'tcx>,
unique_type_id: &mut String) {
let ty::ClosureTy { unsafety,
- onceness,
- store,
- ref bounds,
ref sig,
abi: _ } = closure_ty;
+
if unsafety == ast::Unsafety::Unsafe {
unique_type_id.push_str("unsafe ");
}
- if onceness == ast::Once {
- unique_type_id.push_str("once ");
- }
-
- match store {
- ty::UniqTraitStore => unique_type_id.push_str("~|"),
- ty::RegionTraitStore(_, ast::MutMutable) => {
- unique_type_id.push_str("&mut|")
- }
- ty::RegionTraitStore(_, ast::MutImmutable) => {
- unique_type_id.push_str("&|")
- }
- };
+ unique_type_id.push_str("|");
let sig = ty::erase_late_bound_regions(cx.tcx(), sig);
unique_type_id.push_str("!");
}
}
-
- unique_type_id.push(':');
-
- for bound in bounds.builtin_bounds.iter() {
- match bound {
- ty::BoundSend => unique_type_id.push_str("Send"),
- ty::BoundSized => unique_type_id.push_str("Sized"),
- ty::BoundCopy => unique_type_id.push_str("Copy"),
- ty::BoundSync => unique_type_id.push_str("Sync"),
- };
- unique_type_id.push('+');
- }
}
// Get the UniqueTypeId for an enum variant. Enum variants are not really
if let Some(code_snippet) = code_snippet {
let bytes = code_snippet.as_bytes();
- if bytes.len() > 0 && &bytes[(bytes.len()-1)..] == b"}" {
+ if bytes.len() > 0 && &bytes[bytes.len()-1..] == b"}" {
cleanup_span = Span {
lo: node_span.hi - codemap::BytePos(1),
hi: node_span.hi,
set_debug_location(cx, DebugLocation::new(scope,
loc.line,
- loc.col.to_uint()));
+ loc.col.to_usize()));
} else {
set_debug_location(cx, UnknownLocation);
}
set_debug_location(cx, DebugLocation::new(scope_metadata,
loc.line,
- loc.col.to_uint()));
+ loc.col.to_usize()));
unsafe {
let instr = llvm::LLVMDIBuilderInsertDeclareAtEnd(
DIB(cx),
let work_dir = cx.sess().working_dir.as_str().unwrap();
let file_name =
if full_path.starts_with(work_dir) {
- &full_path[(work_dir.len() + 1u)..full_path.len()]
+ &full_path[work_dir.len() + 1u..full_path.len()]
} else {
full_path
};
parent_scope,
file_metadata,
loc.line as c_uint,
- loc.col.to_uint() as c_uint)
+ loc.col.to_usize() as c_uint)
};
scope_stack.push(ScopeStackEntry { scope_metadata: scope_metadata,
parent_scope,
file_metadata,
loc.line as c_uint,
- loc.col.to_uint() as c_uint)
+ loc.col.to_usize() as c_uint)
};
scope_stack.push(ScopeStackEntry {
use trans::type_::Type;
use syntax::{ast, ast_util, codemap};
-use syntax::print::pprust::{expr_to_string};
use syntax::ptr::P;
use syntax::parse::token;
use std::rc::Rc;
// closure or an older, legacy style closure. Store this
// into a variable to ensure the the RefCell-lock is
// released before we recurse.
- let is_unboxed_closure =
- bcx.tcx().unboxed_closures.borrow().contains_key(&ast_util::local_def(expr.id));
- if is_unboxed_closure {
- closure::trans_unboxed_closure(bcx, &**decl, &**body, expr.id, dest)
- } else {
- let expr_ty = expr_ty(bcx, expr);
- let store = ty::ty_closure_store(expr_ty);
- debug!("translating block function {} with type {}",
- expr_to_string(expr), expr_ty.repr(tcx));
- closure::trans_expr_fn(bcx, store, &**decl, &**body, expr.id, dest)
- }
+ closure::trans_unboxed_closure(bcx, &**decl, &**body, expr.id, dest)
}
ast::ExprCall(ref f, ref args) => {
if bcx.tcx().is_method_call(expr.id) {
unsafe {
let tbl = C_struct(ccx, &components[], false);
let sym = token::gensym("vtable");
- let buf = CString::from_vec(format!("vtable{}", sym.uint()).into_bytes());
+ let buf = CString::from_vec(format!("vtable{}", sym.usize()).into_bytes());
let vt_gvar = llvm::LLVMAddGlobal(ccx.llmod(), val_ty(tbl).to_ref(),
buf.as_ptr());
llvm::LLVMSetInitializer(vt_gvar, tbl);
Type::vec(ccx, &Type::i8(ccx))
}
- // The box pointed to by @T.
- pub fn at_box(ccx: &CrateContext, ty: Type) -> Type {
- Type::struct_(ccx, &[
- ccx.int_type(), Type::glue_fn(ccx, Type::i8p(ccx)).ptr_to(),
- Type::i8p(ccx), Type::i8p(ccx), ty
- ], false)
- }
-
pub fn vtable_ptr(ccx: &CrateContext) -> Type {
Type::glue_fn(ccx, Type::i8p(ccx)).ptr_to().ptr_to()
}
-> Ty<'tcx>
{
if ty::binds_late_bound_regions(self.tcx(), &poly_trait_ref) {
- self.tcx().sess.span_err(
- span,
+ span_err!(self.tcx().sess, span, E0212,
"cannot extract an associated type from a higher-ranked trait bound \
in this context");
self.tcx().types.err
_item_name: ast::Name)
-> Ty<'tcx>
{
- self.tcx().sess.span_err(
- span,
+ span_err!(self.tcx().sess, span, E0213,
"associated types are not accepted in this context");
self.tcx().types.err
convert_angle_bracketed_parameters(this, rscope, data)
}
ast::ParenthesizedParameters(ref data) => {
- tcx.sess.span_err(
- path.span,
+ span_err!(tcx.sess, path.span, E0214,
"parenthesized parameters may only be used with a trait");
(Vec::new(), convert_parenthesized_parameters(this, data), Vec::new())
}
} else {
"expected"
};
- this.tcx().sess.span_fatal(span,
- &format!("wrong number of type arguments: {} {}, found {}",
+ span_fatal!(this.tcx().sess, span, E0243,
+ "wrong number of type arguments: {} {}, found {}",
expected,
required_ty_param_count,
- supplied_ty_param_count)[]);
+ supplied_ty_param_count);
} else if supplied_ty_param_count > formal_ty_param_count {
let expected = if required_ty_param_count < formal_ty_param_count {
"expected at most"
} else {
"expected"
};
- this.tcx().sess.span_fatal(span,
- &format!("wrong number of type arguments: {} {}, found {}",
+ span_fatal!(this.tcx().sess, span, E0244,
+ "wrong number of type arguments: {} {}, found {}",
expected,
formal_ty_param_count,
- supplied_ty_param_count)[]);
+ supplied_ty_param_count);
}
let mut substs = Substs::new_type(types, regions);
trait_ref
}
_ => {
- this.tcx().sess.span_fatal(
- ast_trait_ref.path.span,
- &format!("`{}` is not a trait",
- ast_trait_ref.path.user_string(this.tcx()))[]);
+ span_fatal!(this.tcx().sess, ast_trait_ref.path.span, E0245,
+ "`{}` is not a trait",
+ ast_trait_ref.path.user_string(this.tcx()));
}
}
}
if !this.tcx().sess.features.borrow().unboxed_closures &&
this.tcx().lang_items.fn_trait_kind(trait_def_id).is_some()
{
- this.tcx().sess.span_err(path.span,
+ span_err!(this.tcx().sess, path.span, E0215,
"angle-bracket notation is not stable when \
used with the `Fn` family of traits, use parentheses");
span_help!(this.tcx().sess, path.span,
if !this.tcx().sess.features.borrow().unboxed_closures &&
this.tcx().lang_items.fn_trait_kind(trait_def_id).is_none()
{
- this.tcx().sess.span_err(path.span,
+ span_err!(this.tcx().sess, path.span, E0216,
"parenthetical notation is only stable when \
used with the `Fn` family of traits");
span_help!(this.tcx().sess, path.span,
}
if candidates.len() > 1 {
- tcx.sess.span_err(
- binding.span,
- format!("ambiguous associated type: `{}` defined in multiple supertraits `{}`",
+ span_err!(tcx.sess, binding.span, E0217,
+ "ambiguous associated type: `{}` defined in multiple supertraits `{}`",
token::get_name(binding.item_name),
- candidates.user_string(tcx)).as_slice());
+ candidates.user_string(tcx));
return Err(ErrorReported);
}
let candidate = match candidates.pop() {
Some(c) => c,
None => {
- tcx.sess.span_err(
- binding.span,
- format!("no associated type `{}` defined in `{}`",
+ span_err!(tcx.sess, binding.span, E0218,
+ "no associated type `{}` defined in `{}`",
token::get_name(binding.item_name),
- trait_ref.user_string(tcx)).as_slice());
+ trait_ref.user_string(tcx));
return Err(ErrorReported);
}
};
if ty::binds_late_bound_regions(tcx, &candidate) {
- tcx.sess.span_err(
- binding.span,
- format!("associated type `{}` defined in higher-ranked supertrait `{}`",
+ span_err!(tcx.sess, binding.span, E0219,
+ "associated type `{}` defined in higher-ranked supertrait `{}`",
token::get_name(binding.item_name),
- candidate.user_string(tcx)).as_slice());
+ candidate.user_string(tcx));
return Err(ErrorReported);
}
pprust::ty_to_string(ty));
match ty.node {
ast::TyRptr(None, ref mut_ty) => {
- span_note!(this.tcx().sess, ty.span,
+ span_help!(this.tcx().sess, ty.span,
"perhaps you meant `&{}({} +{})`? (per RFC 438)",
ppaux::mutability_to_string(mut_ty.mutbl),
pprust::ty_to_string(&*mut_ty.ty),
pprust::bounds_to_string(bounds));
}
ast::TyRptr(Some(ref lt), ref mut_ty) => {
- span_note!(this.tcx().sess, ty.span,
+ span_help!(this.tcx().sess, ty.span,
"perhaps you meant `&{} {}({} +{})`? (per RFC 438)",
pprust::lifetime_to_string(lt),
ppaux::mutability_to_string(mut_ty.mutbl),
}
_ => {
- span_note!(this.tcx().sess, ty.span,
+ span_help!(this.tcx().sess, ty.span,
"perhaps you forgot parentheses? (per RFC 438)");
}
}
}
if suitable_bounds.len() == 0 {
- tcx.sess.span_err(ast_ty.span,
- format!("associated type `{}` not found for type parameter `{}`",
+ span_err!(tcx.sess, ast_ty.span, E0220,
+ "associated type `{}` not found for type parameter `{}`",
token::get_name(assoc_name),
- token::get_name(ty_param_name)).as_slice());
+ token::get_name(ty_param_name));
return this.tcx().types.err;
}
if suitable_bounds.len() > 1 {
- tcx.sess.span_err(ast_ty.span,
- format!("ambiguous associated type `{}` in bounds of `{}`",
+ span_err!(tcx.sess, ast_ty.span, E0221,
+ "ambiguous associated type `{}` in bounds of `{}`",
token::get_name(assoc_name),
- token::get_name(ty_param_name)).as_slice());
+ token::get_name(ty_param_name));
for suitable_bound in suitable_bounds.iter() {
span_note!(this.tcx().sess, ast_ty.span,
match ast_ty_to_ty_cache.get(&ast_ty.id) {
Some(&ty::atttce_resolved(ty)) => return ty,
Some(&ty::atttce_unresolved) => {
- tcx.sess.span_fatal(ast_ty.span,
+ span_fatal!(tcx.sess, ast_ty.span, E0246,
"illegal recursive type; insert an enum \
or struct in the cycle, if this is \
desired");
ast::TyParen(ref typ) => ast_ty_to_ty(this, rscope, &**typ),
ast::TyBareFn(ref bf) => {
if bf.decl.variadic && bf.abi != abi::C {
- tcx.sess.span_err(ast_ty.span,
+ span_err!(tcx.sess, ast_ty.span, E0222,
"variadic function must have C calling convention");
}
let bare_fn = ty_of_bare_fn(this, bf.unsafety, bf.abi, &*bf.decl);
ty::mk_self_type(tcx)
}
def::DefMod(id) => {
- tcx.sess.span_fatal(ast_ty.span,
- &format!("found module name used as a type: {}",
- tcx.map.node_to_string(id.node))[]);
+ span_fatal!(tcx.sess, ast_ty.span, E0247,
+ "found module name used as a type: {}",
+ tcx.map.node_to_string(id.node));
}
def::DefPrimTy(_) => {
panic!("DefPrimTy arm missed in previous ast_ty_to_prim_ty call");
def::DefAssociatedTy(trait_type_id) => {
let path_str = tcx.map.path_to_string(
tcx.map.get_parent(trait_type_id.node));
- tcx.sess.span_err(ast_ty.span,
- &format!("ambiguous associated \
+ span_err!(tcx.sess, ast_ty.span, E0223,
+ "ambiguous associated \
type; specify the type \
using the syntax `<Type \
as {}>::{}`",
.last()
.unwrap()
.identifier)
- .get())[]);
+ .get());
this.tcx().types.err
}
def::DefAssociatedPath(provenance, assoc_ident) => {
associated_path_def_to_ty(this, ast_ty, provenance, assoc_ident.name)
}
_ => {
- tcx.sess.span_fatal(ast_ty.span,
- &format!("found value name used \
+ span_fatal!(tcx.sess, ast_ty.span, E0248,
+ "found value name used \
as a type: {:?}",
- a_def)[]);
+ a_def);
}
}
}
ty::mk_vec(tcx, ast_ty_to_ty(this, rscope, &**ty),
Some(i as uint)),
_ => {
- tcx.sess.span_fatal(
- ast_ty.span, "expected constant expr for array length");
+ span_fatal!(tcx.sess, ast_ty.span, E0249,
+ "expected constant expr for array length");
}
}
}
Err(ref r) => {
- tcx.sess.span_fatal(
- ast_ty.span,
- &format!("expected constant expr for array \
+ span_fatal!(tcx.sess, ast_ty.span, E0250,
+ "expected constant expr for array \
length: {}",
- *r)[]);
+ *r);
}
}
}
pub fn ty_of_closure<'tcx>(
this: &AstConv<'tcx>,
unsafety: ast::Unsafety,
- onceness: ast::Onceness,
- bounds: ty::ExistentialBounds<'tcx>,
- store: ty::TraitStore,
decl: &ast::FnDecl,
abi: abi::Abi,
expected_sig: Option<ty::FnSig<'tcx>>)
ty::ClosureTy {
unsafety: unsafety,
- onceness: onceness,
- store: store,
- bounds: bounds,
abi: abi,
sig: ty::Binder(ty::FnSig {inputs: input_tys,
output: output_ty,
None,
&mut projection_bounds))
} else {
- this.tcx().sess.span_err(
- span,
+ span_err!(this.tcx().sess, span, E0224,
"at least one non-builtin trait is required for an object type");
None
};
if !trait_bounds.is_empty() {
let b = &trait_bounds[0];
- this.tcx().sess.span_err(
- b.trait_ref.path.span,
- &format!("only the builtin traits can be used \
- as closure or object bounds")[]);
+ span_err!(this.tcx().sess, b.trait_ref.path.span, E0225,
+ "only the builtin traits can be used \
+ as closure or object bounds");
}
let region_bound = compute_region_bound(this,
builtin_bounds.repr(tcx));
if explicit_region_bounds.len() > 1 {
- tcx.sess.span_err(
- explicit_region_bounds[1].span,
- format!("only a single explicit lifetime bound is permitted").as_slice());
+ span_err!(tcx.sess, explicit_region_bounds[1].span, E0226,
+ "only a single explicit lifetime bound is permitted");
}
if explicit_region_bounds.len() != 0 {
// error.
let r = derived_region_bounds[0];
if derived_region_bounds.slice_from(1).iter().any(|r1| r != *r1) {
- tcx.sess.span_err(
- span,
- &format!("ambiguous lifetime bound, \
- explicit lifetime bound required")[]);
+ span_err!(tcx.sess, span, E0227,
+ "ambiguous lifetime bound, \
+ explicit lifetime bound required");
}
return Some(r);
}
match rscope.default_region_bound(span) {
Some(r) => { r }
None => {
- this.tcx().sess.span_err(
- span,
- &format!("explicit lifetime bound required")[]);
+ span_err!(this.tcx().sess, span, E0228,
+ "explicit lifetime bound required");
ty::ReStatic
}
}
bindings: &[ConvertedBinding<'tcx>])
{
for binding in bindings.iter().take(1) {
- tcx.sess.span_err(
- binding.span,
+ span_err!(tcx.sess, binding.span, E0229,
"associated type bindings are not allowed here");
}
}
check_unboxed_closure(fcx, expr, kind, decl, body, None);
- fcx.ccx.tcx.sess.span_err(
- expr.span,
- "can't infer the \"kind\" of the closure, explicitly annotate it. e.g. \
- `|&:| {}`");
+ span_err!(fcx.ccx.tcx.sess, expr.span, E0187,
+ "can't infer the \"kind\" of the closure; explicitly annotate it; e.g. \
+ `|&:| {{}}`");
},
Some((sig, kind)) => {
check_unboxed_closure(fcx, expr, kind, decl, body, Some(sig));
let mut fn_ty = astconv::ty_of_closure(
fcx,
ast::Unsafety::Normal,
- ast::Many,
-
- // The `RegionTraitStore` and region_existential_bounds
- // are lies, but we ignore them so it doesn't matter.
- //
- // FIXME(pcwalton): Refactor this API.
- ty::region_existential_bound(ty::ReStatic),
- ty::RegionTraitStore(ty::ReStatic, ast::MutImmutable),
-
decl,
abi::RustCall,
expected_sig);
(&ty::StaticExplicitSelfCategory,
&ty::StaticExplicitSelfCategory) => {}
(&ty::StaticExplicitSelfCategory, _) => {
- tcx.sess.span_err(
- impl_m_span,
- format!("method `{}` has a `{}` declaration in the impl, \
+ span_err!(tcx.sess, impl_m_span, E0185,
+ "method `{}` has a `{}` declaration in the impl, \
but not in the trait",
token::get_name(trait_m.name),
ppaux::explicit_self_category_to_str(
- &impl_m.explicit_self)).as_slice());
+ &impl_m.explicit_self));
return;
}
(_, &ty::StaticExplicitSelfCategory) => {
- tcx.sess.span_err(
- impl_m_span,
- format!("method `{}` has a `{}` declaration in the trait, \
+ span_err!(tcx.sess, impl_m_span, E0186,
+ "method `{}` has a `{}` declaration in the trait, \
but not in the impl",
token::get_name(trait_m.name),
ppaux::explicit_self_category_to_str(
- &trait_m.explicit_self)).as_slice());
+ &trait_m.explicit_self));
return;
}
_ => {
// are zero. Since I don't quite know how to phrase things at
// the moment, give a kind of vague error message.
if trait_params.len() != impl_params.len() {
- tcx.sess.span_err(
- span,
- &format!("lifetime parameters or bounds on method `{}` do \
+ span_err!(tcx.sess, span, E0195,
+ "lifetime parameters or bounds on method `{}` do \
not match the trait declaration",
- token::get_name(impl_m.name))[]);
+ token::get_name(impl_m.name));
return false;
}
if !valid_out_of_scope_traits.is_empty() {
let mut candidates = valid_out_of_scope_traits;
candidates.sort();
+ candidates.dedup();
let msg = format!(
"methods from traits can only be called if the trait is in scope; \
the following {traits_are} implemented but not in scope, \
if candidates.len() > 0 {
// sort from most relevant to least relevant
candidates.sort_by(|a, b| a.cmp(b).reverse());
+ candidates.dedup();
let msg = format!(
"methods from traits can only be called if the trait is implemented and in scope; \
}) {
Some(_) => (),
None => {
- ccx.tcx.sess.span_err(attr.span,
- format!("there is no type parameter \
+ span_err!(ccx.tcx.sess, attr.span, E0230,
+ "there is no type parameter \
{} on trait {}",
- s, item.ident.as_str())
- .as_slice());
+ s, item.ident.as_str());
}
},
// `{:1}` and `{}` are not to be used
Position::ArgumentIs(_) | Position::ArgumentNext => {
- ccx.tcx.sess.span_err(attr.span,
+ span_err!(ccx.tcx.sess, attr.span, E0231,
"only named substitution \
parameters are allowed");
}
}
}
} else {
- ccx.tcx.sess.span_err(attr.span,
+ span_err!(ccx.tcx.sess, attr.span, E0232,
"this attribute must have a value, \
eg `#[rustc_on_unimplemented = \"foo\"]`")
}
let trait_did = match fcx.tcx().lang_items.require(IteratorItem) {
Ok(trait_did) => trait_did,
Err(ref err_string) => {
- fcx.tcx().sess.span_err(iterator_expr.span,
- &err_string[]);
+ span_err!(fcx.tcx().sess, iterator_expr.span, E0233,
+ "{}", &err_string[]);
return fcx.tcx().types.err
}
};
if !ty::type_is_error(true_expr_type) {
let ty_string = fcx.infcx().ty_to_string(true_expr_type);
- fcx.tcx().sess.span_err(iterator_expr.span,
- &format!("`for` loop expression has type `{}` which does \
+ span_err!(fcx.tcx().sess, iterator_expr.span, E0234,
+ "`for` loop expression has type `{}` which does \
not implement the `Iterator` trait; \
- maybe try .iter()",
- ty_string)[]);
+ maybe try .iter()", ty_string);
}
fcx.tcx().types.err
}
fcx.tcx().types.err
}
_ => {
- fcx.tcx().sess.span_err(iterator_expr.span,
- &format!("`next` method of the `Iterator` \
+ span_err!(fcx.tcx().sess, iterator_expr.span, E0239,
+ "`next` method of the `Iterator` \
trait has an unexpected type `{}`",
- fcx.infcx().ty_to_string(return_type))
- []);
+ fcx.infcx().ty_to_string(return_type));
fcx.tcx().types.err
}
}
Err(type_error) => {
let type_error_description =
ty::type_err_to_str(tcx, &type_error);
- fcx.tcx()
- .sess
- .span_err(path.span,
- &format!("structure constructor specifies a \
+ span_err!(fcx.tcx().sess, path.span, E0235,
+ "structure constructor specifies a \
structure of type `{}`, but this \
structure has type `{}`: {}",
fcx.infcx()
fcx.infcx()
.ty_to_string(
actual_structure_type),
- type_error_description)[]);
+ type_error_description);
ty::note_and_explain_type_err(tcx, &type_error);
}
}
ty::mk_struct(tcx, did, tcx.mk_substs(substs))
} else {
- tcx.sess.span_err(expr.span, "No lang item for range syntax");
+ span_err!(tcx.sess, expr.span, E0236, "no lang item for range syntax");
fcx.tcx().types.err
}
}
let substs = Substs::new_type(vec![], vec![]);
ty::mk_struct(tcx, did, tcx.mk_substs(substs))
} else {
- tcx.sess.span_err(expr.span, "No lang item for range syntax");
+ span_err!(tcx.sess, expr.span, E0237, "no lang item for range syntax");
fcx.tcx().types.err
}
}
}
ast::ParenthesizedParameters(ref data) => {
- fcx.tcx().sess.span_err(
- span,
+ span_err!(fcx.tcx().sess, span, E0238,
"parenthesized parameters may only be used with a trait");
push_explicit_parenthesized_parameters_from_segment_to_substs(
fcx, space, span, type_defs, data, substs);
"get_tydesc" => {
let tydesc_ty = match ty::get_tydesc_ty(ccx.tcx) {
Ok(t) => t,
- Err(s) => { tcx.sess.span_fatal(it.span, &s[]); }
+ Err(s) => { span_fatal!(tcx.sess, it.span, E0240, "{}", &s[]); }
};
let td_ptr = ty::mk_ptr(ccx.tcx, ty::mt {
ty: tydesc_ty,
{
let object_trait = object_trait(&object_trait_ty);
if !mutability_allowed(referent_mutbl, target_mutbl) {
- fcx.tcx().sess.span_err(source_expr.span,
+ span_err!(fcx.tcx().sess, source_expr.span, E0188,
"types differ in mutability");
} else {
// Ensure that if &'a T is cast to &'b Trait, then T : Trait
}
(_, &ty::ty_uniq(..)) => {
- fcx.ccx.tcx.sess.span_err(
- source_expr.span,
- &format!("can only cast an boxed pointer \
+ span_err!(fcx.ccx.tcx.sess, source_expr.span, E0189,
+ "can only cast a boxed pointer \
to a boxed object, not a {}",
- ty::ty_sort_string(fcx.tcx(), source_ty))[]);
+ ty::ty_sort_string(fcx.tcx(), source_ty));
}
(_, &ty::ty_rptr(..)) => {
- fcx.ccx.tcx.sess.span_err(
- source_expr.span,
- &format!("can only cast a &-pointer \
+ span_err!(fcx.ccx.tcx.sess, source_expr.span, E0190,
+ "can only cast a &-pointer \
to an &-object, not a {}",
- ty::ty_sort_string(fcx.tcx(), source_ty))[]);
+ ty::ty_sort_string(fcx.tcx(), source_ty));
}
_ => {
}
for (trait_def_id, name) in associated_types.into_iter() {
- tcx.sess.span_err(
- span,
- format!("the value of the associated type `{}` (from the trait `{}`) must be specified",
+ span_err!(tcx.sess, span, E0191,
+ "the value of the associated type `{}` (from the trait `{}`) must be specified",
name.user_string(tcx),
- ty::item_path_str(tcx, trait_def_id)).as_slice());
+ ty::item_path_str(tcx, trait_def_id));
}
}
match ccx.tcx.lang_items.to_builtin_kind(trait_ref.def_id) {
Some(ty::BoundSend) | Some(ty::BoundSync) => {}
Some(_) | None => {
- ccx.tcx.sess.span_err(
- item.span,
- format!("negative impls are currently \
- allowed just for `Send` and `Sync`").as_slice())
+ span_err!(ccx.tcx.sess, item.span, E0192,
+ "negative impls are currently \
+ allowed just for `Send` and `Sync`")
}
}
}
fn report_bound_error<'t>(tcx: &ty::ctxt<'t>,
span: Span,
bounded_ty: ty::Ty<'t>) {
- tcx.sess.span_err(
- span,
- format!("cannot bound type `{}`, where clause \
+ span_err!(tcx.sess, span, E0193,
+ "cannot bound type `{}`, where clause \
bounds may only be attached to types involving \
type parameters",
- bounded_ty.repr(tcx)).as_slice())
+ bounded_ty.repr(tcx))
}
fn is_ty_param(ty: ty::Ty) -> bool {
for method_param in generics.types.get_slice(subst::FnSpace).iter() {
if impl_params.contains(&method_param.name) {
- tcx.sess.span_err(
- span,
- &*format!("type parameter `{}` shadows another type parameter of the same name",
- token::get_name(method_param.name)));
+ span_err!(tcx.sess, span, E0194,
+ "type parameter `{}` shadows another type parameter of the same name",
+ token::get_name(method_param.name));
}
}
}
ResolvingUnboxedClosure(_) => {
let span = self.reason.span(self.tcx);
- self.tcx.sess.span_err(span,
+ span_err!(self.tcx.sess, span, E0196,
"cannot determine a type for this \
unboxed closure")
}
match trait_ref.self_ty().sty {
ty::ty_struct(..) | ty::ty_enum(..) => {}
_ => {
- self.tcx.sess.span_err(
- item.span,
- &format!("builtin traits can only be \
- implemented on structs or enums")[]);
+ span_err!(self.tcx.sess, item.span, E0209,
+ "builtin traits can only be \
+ implemented on structs or enums");
}
}
}
match ty::can_type_implement_copy(¶m_env, span, self_type) {
Ok(()) => {}
Err(ty::FieldDoesNotImplementCopy(name)) => {
- tcx.sess
- .span_err(span,
- &format!("the trait `Copy` may not be \
+ span_err!(tcx.sess, span, E0204,
+ "the trait `Copy` may not be \
implemented for this type; field \
`{}` does not implement `Copy`",
- token::get_name(name))[])
+ token::get_name(name))
}
Err(ty::VariantDoesNotImplementCopy(name)) => {
- tcx.sess
- .span_err(span,
- &format!("the trait `Copy` may not be \
+ span_err!(tcx.sess, span, E0205,
+ "the trait `Copy` may not be \
implemented for this type; variant \
`{}` does not implement `Copy`",
- token::get_name(name))[])
+ token::get_name(name))
}
Err(ty::TypeIsStructural) => {
- tcx.sess
- .span_err(span,
+ span_err!(tcx.sess, span, E0206,
"the trait `Copy` may not be implemented \
for this type; type is not a structure or \
enumeration")
}
Err(traits::OrphanCheckErr::UncoveredTy(param_ty)) => {
if !ty::has_attr(self.tcx, trait_def_id, "old_orphan_check") {
- self.tcx.sess.span_err(
- item.span,
- format!(
+ span_err!(self.tcx.sess, item.span, E0210,
"type parameter `{}` is not constrained by any local type; \
only traits defined in the current crate can be implemented \
for a type parameter",
- param_ty.user_string(self.tcx)).as_slice());
+ param_ty.user_string(self.tcx));
self.tcx.sess.span_note(
item.span,
format!("for a limited time, you can add \
match unsafety {
ast::Unsafety::Normal => { /* OK */ }
ast::Unsafety::Unsafe => {
- self.tcx.sess.span_err(
- item.span,
+ span_err!(self.tcx.sess, item.span, E0197,
"inherent impls cannot be declared as unsafe");
}
}
match (trait_def.unsafety, unsafety, polarity) {
(ast::Unsafety::Unsafe,
ast::Unsafety::Unsafe, ast::ImplPolarity::Negative) => {
- self.tcx.sess.span_err(
- item.span,
- format!("negative implementations are not unsafe").as_slice());
+ span_err!(self.tcx.sess, item.span, E0198,
+ "negative implementations are not unsafe");
}
(ast::Unsafety::Normal, ast::Unsafety::Unsafe, _) => {
- self.tcx.sess.span_err(
- item.span,
- format!("implementing the trait `{}` is not unsafe",
- trait_ref.user_string(self.tcx)).as_slice());
+ span_err!(self.tcx.sess, item.span, E0199,
+ "implementing the trait `{}` is not unsafe",
+ trait_ref.user_string(self.tcx));
}
(ast::Unsafety::Unsafe,
ast::Unsafety::Normal, ast::ImplPolarity::Positive) => {
- self.tcx.sess.span_err(
- item.span,
- format!("the trait `{}` requires an `unsafe impl` declaration",
- trait_ref.user_string(self.tcx)).as_slice());
+ span_err!(self.tcx.sess, item.span, E0200,
+ "the trait `{}` requires an `unsafe impl` declaration",
+ trait_ref.user_string(self.tcx));
}
(ast::Unsafety::Unsafe,
let mut seen_methods = FnvHashSet();
for m in ms {
if !seen_methods.insert(m.pe_ident().repr(tcx)) {
- tcx.sess.span_err(m.span, "duplicate method in trait impl");
+ span_err!(tcx.sess, m.span, E0201, "duplicate method in trait impl");
}
let m_def_id = local_def(m.id);
}
ast::TypeImplItem(ref typedef) => {
if opt_trait_ref.is_none() {
- tcx.sess.span_err(typedef.span,
+ span_err!(tcx.sess, typedef.span, E0202,
"associated items are not allowed in inherent impls");
}
assert!(ptr.bound_lifetimes.is_empty());
unbound = Some(ptr.trait_ref.clone());
} else {
- ccx.tcx.sess.span_err(span, "type parameter has more than one relaxed default \
+ span_err!(ccx.tcx.sess, span, E0203,
+ "type parameter has more than one relaxed default \
bound, only one is supported");
}
}
impl trait, self type, or predicates",
param_ty.user_string(tcx)).as_slice());
} else {
- tcx.sess.span_err(
- ty_param.span,
- format!("the type parameter `{}` is not constrained by the \
+ span_err!(tcx.sess, ty_param.span, E0207,
+ "the type parameter `{}` is not constrained by the \
impl trait, self type, or predicates",
- param_ty.user_string(tcx)).as_slice());
+ param_ty.user_string(tcx));
tcx.sess.span_help(
ty_param.span,
format!("you can temporarily opt out of this rule by placing \
#![allow(non_snake_case)]
-register_diagnostic! {
- E0001,
-r##"
- This error suggests that the expression arm corresponding to the noted pattern
- will never be reached as for all possible values of the expression being matched,
- one of the preceeding patterns will match.
-
- This means that perhaps some of the preceeding patterns are too general, this
- one is too specific or the ordering is incorrect.
-"## }
-
register_diagnostics! {
- E0002,
- E0003,
- E0004,
- E0005,
- E0006,
- E0007,
- E0008,
- E0009,
- E0010,
- E0011,
- E0012,
- E0013,
- E0014,
- E0015,
- E0016,
- E0017,
- E0018,
- E0019,
- E0020,
- E0022,
E0023,
E0024,
E0025,
E0046,
E0049,
E0050,
- E0051,
- E0052,
E0053,
E0054,
E0055,
- E0056,
E0057,
E0059,
E0060,
E0092,
E0093,
E0094,
- E0100,
E0101,
E0102,
E0103,
E0104,
E0106,
E0107,
- E0108,
- E0109,
- E0110,
E0116,
E0117,
E0118,
E0130,
E0131,
E0132,
- E0133,
- E0134,
- E0135,
- E0136,
- E0137,
- E0138,
- E0139,
- E0140,
E0141,
- E0152,
- E0153,
- E0157,
- E0158,
E0159,
- E0161,
- E0162,
E0163,
E0164,
- E0165,
E0166,
E0167,
E0168,
- E0169,
- E0171,
E0172,
E0173, // manual implementations of unboxed closure traits are experimental
E0174, // explicit use of unboxed closure methods are experimental
- E0177,
E0178,
- E0180,
- E0181,
E0182,
E0183,
- E0184
+ E0184,
+ E0185,
+ E0186,
+ E0187, // can't infer the kind of the closure
+ E0188, // types differ in mutability
+ E0189, // can only cast a boxed pointer to a boxed object
+ E0190, // can only cast a &-pointer to an &-object
+ E0191, // value of the associated type must be specified
+  E0192, // negative impls are allowed just for `Send` and `Sync`
+ E0193, // cannot bound type where clause bounds may only be attached to types
+ // involving type parameters
+ E0194,
+ E0195, // lifetime parameters or bounds on method do not match the trait declaration
+ E0196, // cannot determine a type for this unboxed closure
+ E0197, // inherent impls cannot be declared as unsafe
+ E0198, // negative implementations are not unsafe
+ E0199, // implementing trait is not unsafe
+ E0200, // trait requires an `unsafe impl` declaration
+ E0201, // duplicate method in trait impl
+  E0202, // associated items are not allowed in inherent impls
+ E0203, // type parameter has more than one relaxed default bound,
+ // and only one is supported
+ E0204, // trait `Copy` may not be implemented for this type; field
+ // does not implement `Copy`
+ E0205, // trait `Copy` may not be implemented for this type; variant
+         // does not implement `Copy`
+ E0206, // trait `Copy` may not be implemented for this type; type is
+ // not a structure or enumeration
+ E0207, // type parameter is not constrained by the impl trait, self type, or predicate
+ E0208,
+ E0209, // builtin traits can only be implemented on structs or enums
+ E0210, // type parameter is not constrained by any local type
+ E0211,
+ E0212, // cannot extract an associated type from a higher-ranked trait bound
+ E0213, // associated types are not accepted in this context
+ E0214, // parenthesized parameters may only be used with a trait
+ E0215, // angle-bracket notation is not stable with `Fn`
+ E0216, // parenthetical notation is only stable with `Fn`
+ E0217, // ambiguous associated type, defined in multiple supertraits
+ E0218, // no associated type defined
+ E0219, // associated type defined in higher-ranked supertrait
+ E0220, // associated type not found for type parameter
+ E0221, // ambiguous associated type in bounds
+ E0222, // variadic function must have C calling convention
+ E0223, // ambiguous associated type
+  E0224, // at least one non-builtin trait is required for an object type
+ E0225, // only the builtin traits can be used as closure or object bounds
+ E0226, // only a single explicit lifetime bound is permitted
+ E0227, // ambiguous lifetime bound, explicit lifetime bound required
+ E0228, // explicit lifetime bound required
+ E0229, // associated type bindings are not allowed here
+ E0230, // there is no type parameter on trait
+ E0231, // only named substitution parameters are allowed
+ E0232, // this attribute must have a value
+ E0233,
+ E0234, // `for` loop expression has type which does not implement the `Iterator` trait
+  E0235, // structure constructor specifies a structure of one type, but
+         // the structure has a different type
+ E0236, // no lang item for range syntax
+ E0237, // no lang item for range syntax
+ E0238, // parenthesized parameters may only be used with a trait
+ E0239, // `next` method of `Iterator` trait has unexpected type
+ E0240,
+ E0241,
+ E0242, // internal error looking up a definition
+ E0243, // wrong number of type arguments
+ E0244, // wrong number of type arguments
+ E0245, // not a trait
+ E0246, // illegal recursive type
+ E0247, // found module name used as a type
+ E0248, // found value name used as a type
+ E0249, // expected constant expr for array length
+ E0250 // expected constant expr for array length
}
+
+__build_diagnostic_array! { DIAGNOSTICS }
+
use std::cell::RefCell;
+// NB: This module needs to be declared first so diagnostics are
+// registered before they are used.
+pub mod diagnostics;
+
mod check;
mod rscope;
mod astconv;
match tcx.def_map.borrow().get(&id) {
Some(x) => x.clone(),
_ => {
- tcx.sess.span_fatal(sp, "internal error looking up a definition")
+ span_fatal!(tcx.sess, sp, E0242, "internal error looking up a definition")
}
}
}
match result {
Ok(_) => true,
Err(ref terr) => {
- tcx.sess.span_err(span,
- &format!("{}: {}",
+ span_err!(tcx.sess, span, E0211,
+ "{}: {}",
msg(),
ty::type_err_to_str(tcx,
- terr))[]);
+ terr));
ty::note_and_explain_type_err(tcx, terr);
false
}
// attribute and report an error with various results if found.
if ty::has_attr(tcx, item_def_id, "rustc_variance") {
let found = item_variances.repr(tcx);
- tcx.sess.span_err(tcx.map.span(item_id), &found[]);
+ span_err!(tcx.sess, tcx.map.span(item_id), E0208, "{}", &found[]);
}
let newly_added = tcx.item_variance_map.borrow_mut()
pub fn build_external_trait(cx: &DocContext, tcx: &ty::ctxt,
did: ast::DefId) -> clean::Trait {
+ use clean::TraitMethod;
+
let def = ty::lookup_trait_def(tcx, did);
let trait_items = ty::trait_items(tcx, did).clean(cx);
let provided = ty::provided_trait_methods(tcx, did);
let items = trait_items.into_iter().map(|trait_item| {
- if provided.iter().any(|a| a.def_id == trait_item.def_id) {
- clean::ProvidedMethod(trait_item)
- } else {
- clean::RequiredMethod(trait_item)
+ match trait_item.inner {
+ clean::TyMethodItem(_) => {
+ if provided.iter().any(|a| a.def_id == trait_item.def_id) {
+ TraitMethod::ProvidedMethod(trait_item)
+ } else {
+ TraitMethod::RequiredMethod(trait_item)
+ }
+ },
+ clean::AssociatedTypeItem(_) => TraitMethod::TypeTraitItem(trait_item),
+ _ => unreachable!()
}
});
let trait_def = ty::lookup_trait_def(tcx, did);
};
Some(item)
}
- ty::TypeTraitItem(_) => {
- // FIXME(pcwalton): Implement.
- None
+ ty::TypeTraitItem(ref assoc_ty) => {
+ let did = assoc_ty.def_id;
+ let type_scheme = ty::lookup_item_type(tcx, did);
+ // Not sure the choice of ParamSpace actually matters here, because an
+ // associated type won't have generics on the LHS
+ let typedef = (type_scheme, subst::ParamSpace::TypeSpace).clean(cx);
+ Some(clean::Item {
+ name: Some(assoc_ty.name.clean(cx)),
+ inner: clean::TypedefItem(typedef),
+ source: clean::Span::empty(),
+ attrs: vec![],
+ visibility: None,
+ stability: stability::lookup(tcx, did).clean(cx),
+ def_id: did
+ })
}
}
}).collect();
+ let polarity = csearch::get_impl_polarity(tcx, did);
return Some(clean::Item {
inner: clean::ImplItem(clean::Impl {
derived: clean::detect_derived(attrs.as_slice()),
for_: ty.ty.clean(cx),
generics: (&ty.generics, subst::TypeSpace).clean(cx),
items: trait_items,
+ polarity: polarity.map(|p| { p.clean(cx) }),
}),
source: clean::Span::empty(),
name: None,
use rustc::middle::subst::{self, ParamSpace, VecPerParamSpace};
use rustc::middle::ty;
use rustc::middle::stability;
-use rustc::session::config;
use std::rc::Rc;
use std::u32;
}
}
-#[derive(Clone, RustcEncodable, RustcDecodable)]
+#[derive(Clone, RustcEncodable, RustcDecodable, Show)]
pub struct Crate {
pub name: String,
pub src: FsPath,
impl<'a, 'tcx> Clean<Crate> for visit_ast::RustdocVisitor<'a, 'tcx> {
fn clean(&self, cx: &DocContext) -> Crate {
+ use rustc::session::config::Input;
+
let mut externs = Vec::new();
cx.sess().cstore.iter_crate_data(|n, meta| {
externs.push((n, meta.clean(cx)));
externs.sort_by(|&(a, _), &(b, _)| a.cmp(&b));
// Figure out the name of this crate
- let input = config::Input::File(cx.src.clone());
- let name = link::find_crate_name(None, self.attrs.as_slice(), &input);
+ let input = &cx.input;
+ let name = link::find_crate_name(None, self.attrs.as_slice(), input);
// Clean the crate, translating the entire libsyntax AST to one that is
// understood by rustdoc.
m.items.extend(tmp.into_iter());
}
+ let src = match cx.input {
+ Input::File(ref path) => path.clone(),
+ Input::Str(_) => FsPath::new("") // FIXME: this is wrong
+ };
+
Crate {
name: name.to_string(),
- src: cx.src.clone(),
+ src: src,
module: Some(module),
externs: externs,
primitives: primitives,
}
}
-#[derive(Clone, RustcEncodable, RustcDecodable)]
+#[derive(Clone, RustcEncodable, RustcDecodable, Show)]
pub struct ExternalCrate {
pub name: String,
pub attrs: Vec<Attribute>,
/// Anything with a source location and set of attributes and, optionally, a
/// name. That is, anything that can be documented. This doesn't correspond
/// directly to the AST's concept of an item; it's a strict superset.
-#[derive(Clone, RustcEncodable, RustcDecodable)]
+#[derive(Clone, RustcEncodable, RustcDecodable, Show)]
pub struct Item {
/// Stringified span
pub source: Span,
}
}
-#[derive(Clone, RustcEncodable, RustcDecodable)]
+#[derive(Clone, RustcEncodable, RustcDecodable, Show)]
pub enum ItemEnum {
StructItem(Struct),
EnumItem(Enum),
AssociatedTypeItem(TyParam),
}
-#[derive(Clone, RustcEncodable, RustcDecodable)]
+#[derive(Clone, RustcEncodable, RustcDecodable, Show)]
pub struct Module {
pub items: Vec<Item>,
pub is_crate: bool,
}
}
-#[derive(Clone, RustcEncodable, RustcDecodable)]
+#[derive(Clone, RustcEncodable, RustcDecodable, Show)]
pub struct Method {
pub generics: Generics,
pub self_: SelfTy,
}
}
-#[derive(Clone, RustcEncodable, RustcDecodable)]
+#[derive(Clone, RustcEncodable, RustcDecodable, Show)]
pub struct TyMethod {
pub unsafety: ast::Unsafety,
pub decl: FnDecl,
}
}
-#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq)]
+#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Show)]
pub enum SelfTy {
SelfStatic,
SelfValue,
}
}
-#[derive(Clone, RustcEncodable, RustcDecodable)]
+#[derive(Clone, RustcEncodable, RustcDecodable, Show)]
pub struct Function {
pub decl: FnDecl,
pub generics: Generics,
}
}
-#[derive(Clone, RustcEncodable, RustcDecodable)]
+#[derive(Clone, RustcEncodable, RustcDecodable, Show)]
pub struct Trait {
pub unsafety: ast::Unsafety,
pub items: Vec<TraitMethod>,
/// An item belonging to a trait, whether a method or associated. Could be named
/// TraitItem except that's already taken by an exported enum variant.
-#[derive(Clone, RustcEncodable, RustcDecodable)]
+#[derive(Clone, RustcEncodable, RustcDecodable, Show)]
pub enum TraitMethod {
RequiredMethod(Item),
ProvidedMethod(Item),
- TypeTraitItem(Item),
+ TypeTraitItem(Item), // an associated type
}
impl TraitMethod {
}
}
-#[derive(Clone, RustcEncodable, RustcDecodable)]
+#[derive(Clone, RustcEncodable, RustcDecodable, Show)]
pub enum ImplMethod {
MethodImplItem(Item),
TypeImplItem(Item),
PrimitiveTuple,
}
-#[derive(Clone, RustcEncodable, RustcDecodable, Copy)]
+#[derive(Clone, RustcEncodable, RustcDecodable, Copy, Show)]
pub enum TypeKind {
TypeEnum,
TypeFunction,
}
}
-#[derive(Clone, RustcEncodable, RustcDecodable)]
+#[derive(Clone, RustcEncodable, RustcDecodable, Show)]
pub enum StructField {
HiddenStructField, // inserted later by strip passes
TypedStructField(Type),
}
}
-#[derive(Clone, RustcEncodable, RustcDecodable)]
+#[derive(Clone, RustcEncodable, RustcDecodable, Show)]
pub struct Struct {
pub struct_type: doctree::StructType,
pub generics: Generics,
/// This is a more limited form of the standard Struct, different in that
/// it lacks the things most items have (name, id, parameterization). Found
/// only as a variant in an enum.
-#[derive(Clone, RustcEncodable, RustcDecodable)]
+#[derive(Clone, RustcEncodable, RustcDecodable, Show)]
pub struct VariantStruct {
pub struct_type: doctree::StructType,
pub fields: Vec<Item>,
}
}
-#[derive(Clone, RustcEncodable, RustcDecodable)]
+#[derive(Clone, RustcEncodable, RustcDecodable, Show)]
pub struct Enum {
pub variants: Vec<Item>,
pub generics: Generics,
}
}
-#[derive(Clone, RustcEncodable, RustcDecodable)]
+#[derive(Clone, RustcEncodable, RustcDecodable, Show)]
pub struct Variant {
pub kind: VariantKind,
}
}
}
-#[derive(Clone, RustcEncodable, RustcDecodable)]
+#[derive(Clone, RustcEncodable, RustcDecodable, Show)]
pub enum VariantKind {
CLikeVariant,
TupleVariant(Vec<Type>),
Span {
filename: filename.to_string(),
loline: lo.line,
- locol: lo.col.to_uint(),
+ locol: lo.col.to_usize(),
hiline: hi.line,
- hicol: hi.col.to_uint(),
+ hicol: hi.col.to_usize(),
}
}
}
}
}
-#[derive(Clone, RustcEncodable, RustcDecodable)]
+#[derive(Clone, RustcEncodable, RustcDecodable, Show)]
pub struct Typedef {
pub type_: Type,
pub generics: Generics,
}
}
-#[derive(Clone, RustcEncodable, RustcDecodable)]
+#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Copy, Show)]
+pub enum ImplPolarity {
+ Positive,
+ Negative,
+}
+
+impl Clean<ImplPolarity> for ast::ImplPolarity {
+ fn clean(&self, _: &DocContext) -> ImplPolarity {
+ match self {
+ &ast::ImplPolarity::Positive => ImplPolarity::Positive,
+ &ast::ImplPolarity::Negative => ImplPolarity::Negative,
+ }
+ }
+}
+
+#[derive(Clone, RustcEncodable, RustcDecodable, Show)]
pub struct Impl {
pub generics: Generics,
pub trait_: Option<Type>,
pub for_: Type,
pub items: Vec<Item>,
pub derived: bool,
+ pub polarity: Option<ImplPolarity>,
}
fn detect_derived<M: AttrMetaMethods>(attrs: &[M]) -> bool {
}
}).collect(),
derived: detect_derived(self.attrs.as_slice()),
+ polarity: Some(self.polarity.clean(cx)),
}),
}
}
}
-#[derive(Clone, RustcEncodable, RustcDecodable)]
+#[derive(Clone, RustcEncodable, RustcDecodable, Show)]
pub struct ViewItem {
pub inner: ViewItemInner,
}
}
}
-#[derive(Clone, RustcEncodable, RustcDecodable)]
+#[derive(Clone, RustcEncodable, RustcDecodable, Show)]
pub enum ViewItemInner {
ExternCrate(String, Option<String>, ast::NodeId),
Import(ViewPath)
}
}
-#[derive(Clone, RustcEncodable, RustcDecodable)]
+#[derive(Clone, RustcEncodable, RustcDecodable, Show)]
pub enum ViewPath {
// use source as str;
SimpleImport(String, ImportSource),
ImportList(ImportSource, Vec<ViewListIdent>),
}
-#[derive(Clone, RustcEncodable, RustcDecodable)]
+#[derive(Clone, RustcEncodable, RustcDecodable, Show)]
pub struct ImportSource {
pub path: Path,
pub did: Option<ast::DefId>,
}
}
-#[derive(Clone, RustcEncodable, RustcDecodable)]
+#[derive(Clone, RustcEncodable, RustcDecodable, Show)]
pub struct ViewListIdent {
pub name: String,
pub source: Option<ast::DefId>,
})
}
-#[derive(Clone, RustcEncodable, RustcDecodable)]
+#[derive(Clone, RustcEncodable, RustcDecodable, Show)]
pub struct Macro {
pub source: String,
}
}
}
-#[derive(Clone, RustcEncodable, RustcDecodable)]
+#[derive(Clone, RustcEncodable, RustcDecodable, Show)]
pub struct Stability {
pub level: attr::StabilityLevel,
pub text: String
source: DUMMY_SP.clean(cx),
name: Some(self.name.clean(cx)),
attrs: Vec::new(),
- // FIXME(#18048): this is wrong, but cross-crate associated types are broken
- // anyway, for the time being.
inner: AssociatedTypeItem(TyParam {
name: self.name.clean(cx),
did: ast::DefId {
krate: 0,
node: ast::DUMMY_NODE_ID
},
+ // FIXME(#20727): bounds are missing and need to be filled in from the
+ // predicates on the trait itself
bounds: vec![],
default: None,
}),
}
}
+impl<'a> Clean<Typedef> for (ty::TypeScheme<'a>, ParamSpace) {
+ fn clean(&self, cx: &DocContext) -> Typedef {
+ let (ref ty_scheme, ps) = *self;
+ Typedef {
+ type_: ty_scheme.ty.clean(cx),
+ generics: (&ty_scheme.generics, ps).clean(cx)
+ }
+ }
+}
+
fn lang_struct(cx: &DocContext, did: Option<ast::DefId>,
t: ty::Ty, name: &str,
fallback: fn(Box<Type>) -> Type) -> Type {
use rustc_driver::driver;
use rustc::session::{self, config};
use rustc::session::config::UnstableFeatures;
-use rustc::session::search_paths::SearchPaths;
use rustc::middle::{privacy, ty};
use rustc::lint;
use rustc_trans::back::link;
use clean;
use clean::Clean;
+pub use rustc::session::config::Input;
+pub use rustc::session::search_paths::SearchPaths;
+
/// Are we generating documentation (`Typed`) or tests (`NotTyped`)?
pub enum MaybeTyped<'tcx> {
Typed(ty::ctxt<'tcx>),
pub struct DocContext<'tcx> {
pub krate: &'tcx ast::Crate,
pub maybe_typed: MaybeTyped<'tcx>,
- pub src: Path,
+ pub input: Input,
pub external_paths: ExternalPaths,
pub external_traits: RefCell<Option<HashMap<ast::DefId, clean::Trait>>>,
pub external_typarams: RefCell<Option<HashMap<ast::DefId, String>>>,
pub type Externs = HashMap<String, Vec<String>>;
pub fn run_core(search_paths: SearchPaths, cfgs: Vec<String>, externs: Externs,
- cpath: &Path, triple: Option<String>)
+ input: Input, triple: Option<String>)
-> (clean::Crate, CrateAnalysis) {
// Parse, resolve, and typecheck the given crate.
- let input = config::Input::File(cpath.clone());
+ let cpath = match input {
+ Input::File(ref p) => Some(p.clone()),
+ _ => None
+ };
let warning_lint = lint::builtin::WARNINGS.name_lower();
let span_diagnostic_handler =
diagnostic::mk_span_handler(diagnostic_handler, codemap);
- let sess = session::build_session_(sessopts,
- Some(cpath.clone()),
+ let sess = session::build_session_(sessopts, cpath,
span_diagnostic_handler);
let cfg = config::build_configuration(&sess);
let ctxt = DocContext {
krate: ty_cx.map.krate(),
maybe_typed: Typed(ty_cx),
- src: cpath.clone(),
+ input: input,
external_traits: RefCell::new(Some(HashMap::new())),
external_typarams: RefCell::new(Some(HashMap::new())),
external_paths: RefCell::new(Some(HashMap::new())),
l_sysid: 0,
};
let ret = unsafe {
- libc::fcntl(fd, os::F_SETLKW, &flock as *const os::flock)
+ libc::fcntl(fd, os::F_SETLKW, &flock)
};
if ret == -1 {
let errno = stdos::errno();
l_sysid: 0,
};
unsafe {
- libc::fcntl(self.fd, os::F_SETLK, &flock as *const os::flock);
+ libc::fcntl(self.fd, os::F_SETLK, &flock);
libc::close(self.fd);
}
}
// This is a documented path, link to it!
Some((ref fqp, shortty)) if abs_root.is_some() => {
let mut url = String::from_str(abs_root.unwrap().as_slice());
- let to_link = &fqp[..(fqp.len() - 1)];
+ let to_link = &fqp[..fqp.len() - 1];
for component in to_link.iter() {
url.push_str(component.as_slice());
url.push_str("/");
search_index.push(IndexItem {
ty: shortty(item),
name: item.name.clone().unwrap(),
- path: fqp[..(fqp.len() - 1)].connect("::"),
+ path: fqp[..fqp.len() - 1].connect("::"),
desc: shorter(item.doc_value()).to_string(),
parent: Some(did),
});
};
let mut mydst = dst.clone();
- for part in remote_path[..(remote_path.len() - 1)].iter() {
+ for part in remote_path[..remote_path.len() - 1].iter() {
mydst.push(part.as_slice());
try!(mkdir(&mydst));
}
clean::StructFieldItem(..) |
clean::VariantItem(..) => {
((Some(*self.parent_stack.last().unwrap()),
- Some(&self.stack[..(self.stack.len() - 1)])),
+ Some(&self.stack[..self.stack.len() - 1])),
false)
}
clean::MethodItem(..) => {
let did = *last;
let path = match self.paths.get(&did) {
Some(&(_, ItemType::Trait)) =>
- Some(&self.stack[..(self.stack.len() - 1)]),
+ Some(&self.stack[..self.stack.len() - 1]),
// The current stack not necessarily has correlation for
// where the type was defined. On the other hand,
// `paths` always has the right information if present.
Some(&(ref fqp, ItemType::Struct)) |
Some(&(ref fqp, ItemType::Enum)) =>
- Some(&fqp[..(fqp.len() - 1)]),
+ Some(&fqp[..fqp.len() - 1]),
Some(..) => Some(self.stack.as_slice()),
None => None
};
.collect::<String>();
match cache().paths.get(&it.def_id) {
Some(&(ref names, _)) => {
- for name in (&names[..(names.len() - 1)]).iter() {
+ for name in (&names[..names.len() - 1]).iter() {
url.push_str(name.as_slice());
url.push_str("/");
}
try!(write!(w, "<h3 class='impl'>{}<code>impl{} ",
ConciseStability(&i.stability),
i.impl_.generics));
+ match i.impl_.polarity {
+ Some(clean::ImplPolarity::Negative) => try!(write!(w, "!")),
+ _ => {}
+ }
match i.impl_.trait_ {
Some(ref ty) => try!(write!(w, "{} for ", *ty)),
None => {}
$('.do-search').on('click', search);
$('.search-input').on('keyup', function() {
clearTimeout(keyUpTimeout);
- keyUpTimeout = setTimeout(search, 100);
+ keyUpTimeout = setTimeout(search, 500);
});
// Push and pop states are used to add search results to the browser
info!("starting to run rustc");
let (mut krate, analysis) = std::thread::Thread::scoped(move |:| {
+ use rustc::session::config::Input;
+
let cr = cr;
- core::run_core(paths, cfgs, externs, &cr, triple)
+ core::run_core(paths, cfgs, externs, Input::File(cr), triple)
}).join().map_err(|_| "rustc failed").unwrap();
info!("finished with rustc");
let mut analysis = Some(analysis);
let ctx = core::DocContext {
krate: &krate,
maybe_typed: core::NotTyped(sess),
- src: input_path,
+ input: input,
external_paths: RefCell::new(Some(HashMap::new())),
external_traits: RefCell::new(None),
external_typarams: RefCell::new(None),
//!
//! An object is a series of string keys mapping to values, in `"key": value` format.
//! Arrays are enclosed in square brackets ([ ... ]) and objects in curly brackets ({ ... }).
-//! A simple JSON document encoding a person, his/her age, address and phone numbers could look like
+//! A simple JSON document encoding a person, their age, address and phone numbers could look like
//!
//! ```ignore
//! {
InternalIndex(i) => StackElement::Index(i),
InternalKey(start, size) => {
StackElement::Key(str::from_utf8(
- &self.str_buffer[(start as uint) .. (start as uint + size as uint)])
+ &self.str_buffer[start as uint .. start as uint + size as uint])
.unwrap())
}
}
Some(&InternalIndex(i)) => Some(StackElement::Index(i)),
Some(&InternalKey(start, size)) => {
Some(StackElement::Key(str::from_utf8(
- &self.str_buffer[(start as uint) .. (start+size) as uint]
+ &self.str_buffer[start as uint .. (start+size) as uint]
).unwrap()))
}
}
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// Copyright 2014-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// min_capacity(size) must be smaller than the internal capacity,
// so that the map is not resized:
// `min_capacity(usable_capacity(x)) <= x`.
- // The lef-hand side can only be smaller due to flooring by integer
+ // The left-hand side can only be smaller due to flooring by integer
// division.
//
// This doesn't have to be checked for overflow since allocation size
/// map.insert("b", 2);
/// map.insert("c", 3);
///
- /// for key in map.values() {
- /// println!("{}", key);
+ /// for val in map.values() {
+ /// println!("{}", val);
/// }
/// ```
#[stable]
search_entry_hashed(&mut self.table, hash, key)
}
- /// Return the number of elements in the map.
+ /// Returns the number of elements in the map.
///
/// # Example
///
#[stable]
pub fn len(&self) -> uint { self.table.size() }
- /// Return true if the map contains no elements.
+ /// Returns true if the map contains no elements.
///
/// # Example
///
}
}
-/// HashMap iterator
+/// HashMap iterator.
#[stable]
pub struct Iter<'a, K: 'a, V: 'a> {
inner: table::Iter<'a, K, V>
}
}
-/// HashMap mutable values iterator
+/// HashMap mutable values iterator.
#[stable]
pub struct IterMut<'a, K: 'a, V: 'a> {
inner: table::IterMut<'a, K, V>
}
-/// HashMap move iterator
+/// HashMap move iterator.
#[stable]
pub struct IntoIter<K, V> {
inner: iter::Map<
>
}
-/// HashMap keys iterator
+/// HashMap keys iterator.
#[stable]
pub struct Keys<'a, K: 'a, V: 'a> {
inner: Map<(&'a K, &'a V), &'a K, Iter<'a, K, V>, fn((&'a K, &'a V)) -> &'a K>
}
}
-/// HashMap values iterator
+/// HashMap values iterator.
#[stable]
pub struct Values<'a, K: 'a, V: 'a> {
inner: Map<(&'a K, &'a V), &'a V, Iter<'a, K, V>, fn((&'a K, &'a V)) -> &'a V>
}
}
-/// HashMap drain iterator
+/// HashMap drain iterator.
#[unstable = "matches collection reform specification, waiting for dust to settle"]
pub struct Drain<'a, K: 'a, V: 'a> {
inner: iter::Map<
>
}
-/// A view into a single occupied location in a HashMap
+/// A view into a single occupied location in a HashMap.
#[unstable = "precise API still being fleshed out"]
pub struct OccupiedEntry<'a, K: 'a, V: 'a> {
elem: FullBucket<K, V, &'a mut RawTable<K, V>>,
}
-/// A view into a single empty location in a HashMap
+/// A view into a single empty location in a HashMap.
#[unstable = "precise API still being fleshed out"]
pub struct VacantEntry<'a, K: 'a, V: 'a> {
hash: SafeHash,
elem: VacantEntryState<K, V, &'a mut RawTable<K, V>>,
}
-/// A view into a single location in a map, which may be vacant or occupied
+/// A view into a single location in a map, which may be vacant or occupied.
#[unstable = "precise API still being fleshed out"]
pub enum Entry<'a, K: 'a, V: 'a> {
- /// An occupied Entry
+ /// An occupied Entry.
Occupied(OccupiedEntry<'a, K, V>),
- /// A vacant Entry
+ /// A vacant Entry.
Vacant(VacantEntry<'a, K, V>),
}
-/// Possible states of a VacantEntry
+/// Possible states of a VacantEntry.
enum VacantEntryState<K, V, M> {
/// The index is occupied, but the key to insert has precedence,
- /// and will kick the current one out on insertion
+ /// and will kick the current one out on insertion.
NeqElem(FullBucket<K, V, M>, uint),
- /// The index is genuinely vacant
+ /// The index is genuinely vacant.
NoElem(EmptyBucket<K, V, M>),
}
#[unstable = "matches collection reform v2 specification, waiting for dust to settle"]
impl<'a, K, V> Entry<'a, K, V> {
- /// Returns a mutable reference to the entry if occupied, or the VacantEntry if vacant
+ /// Returns a mutable reference to the entry if occupied, or the VacantEntry if vacant.
pub fn get(self) -> Result<&'a mut V, VacantEntry<'a, K, V>> {
match self {
Occupied(entry) => Ok(entry.into_mut()),
#[unstable = "matches collection reform v2 specification, waiting for dust to settle"]
impl<'a, K, V> OccupiedEntry<'a, K, V> {
- /// Gets a reference to the value in the entry
+ /// Gets a reference to the value in the entry.
pub fn get(&self) -> &V {
self.elem.read().1
}
- /// Gets a mutable reference to the value in the entry
+ /// Gets a mutable reference to the value in the entry.
pub fn get_mut(&mut self) -> &mut V {
self.elem.read_mut().1
}
assert_eq!(m.remove(&0), Some(0));
}
- #[test]
- fn test_find_equiv() {
- let mut m = HashMap::new();
-
- let (foo, bar, baz) = (1i,2i,3i);
- m.insert("foo".to_string(), foo);
- m.insert("bar".to_string(), bar);
- m.insert("baz".to_string(), baz);
-
-
- assert_eq!(m.get("foo"), Some(&foo));
- assert_eq!(m.get("bar"), Some(&bar));
- assert_eq!(m.get("baz"), Some(&baz));
-
- assert_eq!(m.get("qux"), None);
- }
-
#[test]
fn test_from_iter() {
let xs = [(1i, 1i), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)];
/// This works similarly to `put`, building an `EmptyBucket` out of the
/// taken bucket.
pub fn take(mut self) -> (EmptyBucket<K, V, M>, K, V) {
- let key = self.raw.key as *const K;
- let val = self.raw.val as *const V;
-
self.table.size -= 1;
unsafe {
idx: self.idx,
table: self.table
},
- ptr::read(key),
- ptr::read(val)
+ ptr::read(self.raw.key),
+ ptr::read(self.raw.val)
)
}
}
pub fn shift(mut self) -> Option<GapThenFull<K, V, M>> {
unsafe {
*self.gap.raw.hash = mem::replace(&mut *self.full.raw.hash, EMPTY_BUCKET);
- copy_nonoverlapping_memory(self.gap.raw.key, self.full.raw.key as *const K, 1);
- copy_nonoverlapping_memory(self.gap.raw.val, self.full.raw.val as *const V, 1);
+ copy_nonoverlapping_memory(self.gap.raw.key, self.full.raw.key, 1);
+ copy_nonoverlapping_memory(self.gap.raw.val, self.full.raw.val, 1);
}
let FullBucket { raw: prev_raw, idx: prev_idx, .. } = self.full;
if *self.raw.hash != EMPTY_BUCKET {
self.elems_left -= 1;
return Some((
- ptr::read(self.raw.key as *const K),
- ptr::read(self.raw.val as *const V)
+ ptr::read(self.raw.key),
+ ptr::read(self.raw.val)
));
}
}
SafeHash {
hash: *bucket.hash,
},
- ptr::read(bucket.key as *const K),
- ptr::read(bucket.val as *const V)
+ ptr::read(bucket.key),
+ ptr::read(bucket.val)
)
}
})
SafeHash {
hash: ptr::replace(bucket.hash, EMPTY_BUCKET),
},
- ptr::read(bucket.key as *const K),
- ptr::read(bucket.val as *const V)
+ ptr::read(bucket.key),
+ ptr::read(bucket.val)
)
}
})
//! chain information:
//!
//! ```
-//! trait Error: Send {
+//! trait Error {
//! fn description(&self) -> &str;
//!
//! fn detail(&self) -> Option<String> { None }
/// Base functionality for all errors in Rust.
#[unstable = "the exact API of this trait may change"]
-pub trait Error: Send {
+pub trait Error {
/// A short description of the error; usually a static string.
fn description(&self) -> &str;
//! - `fmt::Show` implementations should be implemented for **all** public types.
//! Output will typically represent the internal state as faithfully as possible.
//! The purpose of the `Show` trait is to facilitate debugging Rust code. In
-//! most cases, using `#[deriving(Show)]` is sufficient and recommended.
+//! most cases, using `#[derive(Show)]` is sufficient and recommended.
//!
//! Some examples of the output from both traits:
//!
fn write(&mut self, buf: &[u8]) -> IoResult<()> {
match buf.iter().rposition(|&b| b == b'\n') {
Some(i) => {
- try!(self.inner.write(&buf[..(i + 1)]));
+ try!(self.inner.write(&buf[..i + 1]));
try!(self.inner.flush());
- try!(self.inner.write(&buf[(i + 1)..]));
+ try!(self.inner.write(&buf[i + 1..]));
Ok(())
}
None => self.inner.write(buf),
let write_len = min(buf.len(), self.buf.len() - self.pos);
{
- let input = &self.buf[self.pos.. (self.pos + write_len)];
+ let input = &self.buf[self.pos.. self.pos + write_len];
let output = buf.slice_to_mut(write_len);
assert_eq!(input.len(), output.len());
slice::bytes::copy_memory(output, input);
let write_len = min(buf.len(), self.buf.len() - self.pos);
{
- let input = &self.buf[self.pos.. (self.pos + write_len)];
+ let input = &self.buf[self.pos.. self.pos + write_len];
let output = buf.slice_to_mut(write_len);
assert_eq!(input.len(), output.len());
slice::bytes::copy_memory(output, input);
use fmt;
use int;
use iter::{Iterator, IteratorExt};
-use marker::Sized;
+use marker::{Sized, Send};
use mem::transmute;
use ops::FnOnce;
use option::Option;
}
}
-impl FromError<IoError> for Box<Error> {
- fn from_error(err: IoError) -> Box<Error> {
+impl FromError<IoError> for Box<Error + Send> {
+ fn from_error(err: IoError) -> Box<Error + Send> {
box err
}
}
fn read_until(&mut self, byte: u8) -> IoResult<Vec<u8>> {
let mut res = Vec::new();
- let mut used;
loop {
- {
+ let (done, used) = {
let available = match self.fill_buf() {
Ok(n) => n,
Err(ref e) if res.len() > 0 && e.kind == EndOfFile => {
- used = 0;
- break
+ return Ok(res);
}
Err(e) => return Err(e)
};
match available.iter().position(|&b| b == byte) {
Some(i) => {
- res.push_all(&available[..(i + 1)]);
- used = i + 1;
- break
+ res.push_all(&available[..i + 1]);
+ (true, i + 1)
}
None => {
res.push_all(available);
- used = available.len();
+ (false, available.len())
}
}
+ };
+ self.consume(used);
+ if done {
+ return Ok(res);
}
- self.consume(used);
}
- self.consume(used);
- Ok(res)
}
/// Reads the next utf8-encoded character from the underlying stream.
use collections::HashMap;
use ffi::CString;
use fmt;
-// NOTE(stage0) remove import after a snapshot
-#[cfg(stage0)]
-use hash::Hash;
use io::pipe::{PipeStream, PipePair};
use io::{IoResult, IoError};
use io;
use thread::Thread;
use time::Duration;
+ #[test]
+ fn test_timer_send() {
+ let mut timer = Timer::new().unwrap();
+ Thread::spawn(move || timer.sleep(Duration::milliseconds(1)));
+ }
+
#[test]
fn test_io_timer_sleep_simple() {
let mut timer = Timer::new().unwrap();
//!
//! The [`ptr`](ptr/index.html) and [`mem`](mem/index.html)
//! modules deal with unsafe pointers and memory manipulation.
-//! [`markers`](markers/index.html) defines the special built-in traits,
+//! [`marker`](marker/index.html) defines the special built-in traits,
//! and [`raw`](raw/index.html) the runtime representation of Rust types.
//! These are some of the lowest-level building blocks in Rust.
//!
use fmt;
use io::{IoResult, IoError};
use iter::{Iterator, IteratorExt};
-use marker::Copy;
+use marker::{Copy, Send};
use libc::{c_void, c_int, c_char};
use libc;
use boxed::Box;
fn detail(&self) -> Option<String> { Some(format!("{:?}", self)) }
}
-impl FromError<MapError> for Box<Error> {
- fn from_error(err: MapError) -> Box<Error> {
+impl FromError<MapError> for Box<Error + Send> {
+ fn from_error(err: MapError) -> Box<Error + Send> {
box err
}
}
match name.rposition_elem(&dot) {
None | Some(0) => None,
Some(1) if name == b".." => None,
- Some(pos) => Some(&name[(pos+1)..])
+ Some(pos) => Some(&name[pos+1..])
}
}
}
None => {
self.repr = Path::normalize(filename);
}
- Some(idx) if &self.repr[(idx+1)..] == b".." => {
+ Some(idx) if &self.repr[idx+1..] == b".." => {
let mut v = Vec::with_capacity(self.repr.len() + 1 + filename.len());
v.push_all(self.repr.as_slice());
v.push(SEP_BYTE);
}
Some(idx) => {
let mut v = Vec::with_capacity(idx + 1 + filename.len());
- v.push_all(&self.repr[..(idx+1)]);
+ v.push_all(&self.repr[..idx+1]);
v.push_all(filename);
// FIXME: this is slow
self.repr = Path::normalize(v.as_slice());
None if b".." == self.repr => self.repr.as_slice(),
None => dot_static,
Some(0) => &self.repr[..1],
- Some(idx) if &self.repr[(idx+1)..] == b".." => self.repr.as_slice(),
+ Some(idx) if &self.repr[idx+1..] == b".." => self.repr.as_slice(),
Some(idx) => &self.repr[..idx]
}
}
None if b"." == self.repr ||
b".." == self.repr => None,
None => Some(self.repr.as_slice()),
- Some(idx) if &self.repr[(idx+1)..] == b".." => None,
+ Some(idx) if &self.repr[idx+1..] == b".." => None,
Some(0) if self.repr[1..].is_empty() => None,
- Some(idx) => Some(&self.repr[(idx+1)..])
+ Some(idx) => Some(&self.repr[idx+1..])
}
}
if self.prefix.is_some() {
Some(Path::new(match self.prefix {
Some(DiskPrefix) if self.is_absolute() => {
- &self.repr[..(self.prefix_len()+1)]
+ &self.repr[..self.prefix_len()+1]
}
Some(VerbatimDiskPrefix) => {
- &self.repr[..(self.prefix_len()+1)]
+ &self.repr[..self.prefix_len()+1]
}
_ => &self.repr[..self.prefix_len()]
}))
Some(_) => {
let plen = self.prefix_len();
if repr.len() > plen && repr.as_bytes()[plen] == SEP_BYTE {
- &repr[(plen+1)..]
+ &repr[plen+1..]
} else { &repr[plen..] }
}
None if repr.as_bytes()[0] == SEP_BYTE => &repr[1..],
}
Some(UNCPrefix(a,b)) => {
s.push_str("\\\\");
- s.push_str(&prefix_[2..(a+2)]);
+ s.push_str(&prefix_[2..a+2]);
s.push(SEP);
- s.push_str(&prefix_[(3+a)..(3+a+b)]);
+ s.push_str(&prefix_[3+a..3+a+b]);
}
Some(_) => s.push_str(prefix_),
None => ()
fn update_sepidx(&mut self) {
let s = if self.has_nonsemantic_trailing_slash() {
- &self.repr[..(self.repr.len()-1)]
+ &self.repr[..self.repr.len()-1]
} else { &self.repr[] };
let sep_test: fn(char) -> bool = if !prefix_is_verbatim(self.prefix) {
is_sep
None => return None,
Some(x) => x
};
- path = &path[(idx_a+1)..];
+ path = &path[idx_a+1..];
let idx_b = path.find(f).unwrap_or(path.len());
Some((idx_a, idx_b))
}
static M: StaticMutex = MUTEX_INIT;
let g = M.lock().unwrap();
- let (g, success) = C.wait_timeout(g, Duration::nanoseconds(1000)).unwrap();
- assert!(!success);
+ let (g, _no_timeout) = C.wait_timeout(g, Duration::nanoseconds(1000)).unwrap();
+ // spurious wakeups mean this isn't necessarily true
+ // assert!(!no_timeout);
let _t = Thread::spawn(move || {
let _g = M.lock().unwrap();
C.notify_one();
});
- let (g, success) = C.wait_timeout(g, Duration::days(1)).unwrap();
- assert!(success);
+ let (g, no_timeout) = C.wait_timeout(g, Duration::days(1)).unwrap();
+ assert!(no_timeout);
drop(g);
unsafe { C.destroy(); M.destroy(); }
}
use sync::atomic::{AtomicBool, ATOMIC_BOOL_INIT, Ordering};
use sync::Arc;
use marker::{Sync, Send};
-#[cfg(stage0)] // NOTE remove use after next snapshot
-use marker::{NoSend, NoSync};
use mem;
use clone::Clone;
inner: Arc<Inner>,
}
-#[cfg(stage0)] // NOTE remove impl after next snapshot
pub struct WaitToken {
inner: Arc<Inner>,
- no_send: NoSend,
- no_sync: NoSync,
}
-#[cfg(not(stage0))] // NOTE remove cfg after next snapshot
-pub struct WaitToken {
- inner: Arc<Inner>,
-}
-
-#[cfg(not(stage0))] // NOTE remove cfg after next snapshot
impl !Send for WaitToken {}
-#[cfg(not(stage0))] // NOTE remove cfg after next snapshot
impl !Sync for WaitToken {}
-#[cfg(stage0)] // NOTE remove impl after next snapshot
-pub fn tokens() -> (WaitToken, SignalToken) {
- let inner = Arc::new(Inner {
- thread: Thread::current(),
- woken: ATOMIC_BOOL_INIT,
- });
- let wait_token = WaitToken {
- inner: inner.clone(),
- no_send: NoSend,
- no_sync: NoSync,
- };
- let signal_token = SignalToken {
- inner: inner
- };
- (wait_token, signal_token)
-}
-
-#[cfg(not(stage0))] // NOTE remove cfg after next snapshot
pub fn tokens() -> (WaitToken, SignalToken) {
let inner = Arc::new(Inner {
thread: Thread::current(),
/// The sending-half of Rust's synchronous channel type. This half can only be
/// owned by one task, but it can be cloned to send to other tasks.
#[stable]
-#[cfg(stage0)] // NOTE remove impl after next snapshot
-pub struct SyncSender<T> {
- inner: Arc<UnsafeCell<sync::Packet<T>>>,
- // can't share in an arc
- _marker: marker::NoSync,
-}
-
-/// The sending-half of Rust's synchronous channel type. This half can only be
-/// owned by one task, but it can be cloned to send to other tasks.
-#[stable]
-#[cfg(not(stage0))] // NOTE remove cfg after next snapshot
pub struct SyncSender<T> {
inner: Arc<UnsafeCell<sync::Packet<T>>>,
}
unsafe impl<T:Send> Send for SyncSender<T> {}
-#[cfg(not(stage0))] // NOTE remove cfg after next snapshot
impl<T> !Sync for SyncSender<T> {}
/// An error returned from the `send` function on channels.
////////////////////////////////////////////////////////////////////////////////
impl<T: Send> SyncSender<T> {
- #[cfg(stage0)] // NOTE remove impl after next snapshot
- fn new(inner: Arc<UnsafeCell<sync::Packet<T>>>) -> SyncSender<T> {
- SyncSender { inner: inner, _marker: marker::NoSync }
- }
-
- #[cfg(not(stage0))] // NOTE remove cfg after next snapshot
fn new(inner: Arc<UnsafeCell<sync::Packet<T>>>) -> SyncSender<T> {
SyncSender { inner: inner }
}
/// The "receiver set" of the select interface. This structure is used to manage
/// a set of receivers which are being selected over.
-#[cfg(stage0)] // NOTE remove impl after next snapshot
pub struct Select {
head: *mut Handle<'static, ()>,
tail: *mut Handle<'static, ()>,
next_id: Cell<uint>,
- marker1: marker::NoSend,
}
-/// The "receiver set" of the select interface. This structure is used to manage
-/// a set of receivers which are being selected over.
-#[cfg(not(stage0))] // NOTE remove cfg after next snapshot
-pub struct Select {
- head: *mut Handle<'static, ()>,
- tail: *mut Handle<'static, ()>,
- next_id: Cell<uint>,
-}
-
-#[cfg(not(stage0))] // NOTE remove cfg after next snapshot
impl !marker::Send for Select {}
/// A handle to a receiver which is currently a member of a `Select` set of
}
impl Select {
- /// Creates a new selection structure. This set is initially empty and
- /// `wait` will panic!() if called.
- ///
- /// Usage of this struct directly can sometimes be burdensome, and usage is
- /// rather much easier through the `select!` macro.
- #[cfg(stage0)] // NOTE remove impl after next snapshot
- pub fn new() -> Select {
- Select {
- marker1: marker::NoSend,
- head: 0 as *mut Handle<'static, ()>,
- tail: 0 as *mut Handle<'static, ()>,
- next_id: Cell::new(1),
- }
- }
/// Creates a new selection structure. This set is initially empty and
/// `wait` will panic!() if called.
///
/// Usage of this struct directly can sometimes be burdensome, and usage is
/// rather much easier through the `select!` macro.
- #[cfg(not(stage0))] // NOTE remove cfg after next snapshot
pub fn new() -> Select {
Select {
head: 0 as *mut Handle<'static, ()>,
/// Deref and DerefMut implementations
#[must_use]
#[stable]
-#[cfg(stage0)] // NOTE remove impl after next snapshot
pub struct MutexGuard<'a, T: 'a> {
// funny underscores due to how Deref/DerefMut currently work (they
// disregard field privacy).
__lock: &'a StaticMutex,
__data: &'a UnsafeCell<T>,
__poison: poison::Guard,
- __marker: marker::NoSend,
}
-/// An RAII implementation of a "scoped lock" of a mutex. When this structure is
-/// dropped (falls out of scope), the lock will be unlocked.
-///
-/// The data protected by the mutex can be access through this guard via its
-/// Deref and DerefMut implementations
-#[must_use]
-#[stable]
-#[cfg(not(stage0))] // NOTE remove cfg after next snapshot
-pub struct MutexGuard<'a, T: 'a> {
- // funny underscores due to how Deref/DerefMut currently work (they
- // disregard field privacy).
- __lock: &'a StaticMutex,
- __data: &'a UnsafeCell<T>,
- __poison: poison::Guard,
-}
-
-#[cfg(not(stage0))] // NOTE remove cfg after next snapshot
impl<'a, T> !marker::Send for MutexGuard<'a, T> {}
/// Static initialization of a mutex. This constant can be used to initialize
}
impl<'mutex, T> MutexGuard<'mutex, T> {
- #[cfg(stage0)] // NOTE remove afte next snapshot
- fn new(lock: &'mutex StaticMutex, data: &'mutex UnsafeCell<T>)
- -> LockResult<MutexGuard<'mutex, T>> {
- poison::map_result(lock.poison.borrow(), |guard| {
- MutexGuard {
- __lock: lock,
- __data: data,
- __poison: guard,
- __marker: marker::NoSend,
- }
- })
- }
- #[cfg(not(stage0))] // NOTE remove cfg afte next snapshot
fn new(lock: &'mutex StaticMutex, data: &'mutex UnsafeCell<T>)
-> LockResult<MutexGuard<'mutex, T>> {
poison::map_result(lock.poison.borrow(), |guard| {
use prelude::v1::*;
use cell::UnsafeCell;
-use error::FromError;
+use error::{Error, FromError};
use fmt;
use thread::Thread;
impl<T> fmt::Show for PoisonError<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- "poisoned lock: another task failed inside".fmt(f)
+ self.description().fmt(f)
+ }
+}
+
+impl<T> Error for PoisonError<T> {
+ fn description(&self) -> &str {
+ "poisoned lock: another task failed inside"
}
}
impl<T> fmt::Show for TryLockError<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ self.description().fmt(f)
+ }
+}
+
+impl<T> Error for TryLockError<T> {
+ fn description(&self) -> &str {
+ match *self {
+ TryLockError::Poisoned(ref p) => p.description(),
+ TryLockError::WouldBlock => "try_lock failed because the operation would block"
+ }
+ }
+
+ fn cause(&self) -> Option<&Error> {
match *self {
- TryLockError::Poisoned(ref p) => p.fmt(f),
- TryLockError::WouldBlock => {
- "try_lock failed because the operation would block".fmt(f)
- }
+ TryLockError::Poisoned(ref p) => Some(p),
+ _ => None
}
}
}
/// dropped.
#[must_use]
#[stable]
-#[cfg(stage0)] // NOTE remove impl after next snapshot
pub struct RwLockReadGuard<'a, T: 'a> {
__lock: &'a StaticRwLock,
__data: &'a UnsafeCell<T>,
- __marker: marker::NoSend,
}
-/// RAII structure used to release the shared read access of a lock when
-/// dropped.
-#[must_use]
-#[stable]
-#[cfg(not(stage0))] // NOTE remove cfg after next snapshot
-pub struct RwLockReadGuard<'a, T: 'a> {
- __lock: &'a StaticRwLock,
- __data: &'a UnsafeCell<T>,
-}
-
-#[cfg(not(stage0))] // NOTE remove cfg after next snapshot
impl<'a, T> !marker::Send for RwLockReadGuard<'a, T> {}
/// RAII structure used to release the exclusive write access of a lock when
/// dropped.
#[must_use]
#[stable]
-#[cfg(stage0)] // NOTE remove impl after next snapshot
-pub struct RwLockWriteGuard<'a, T: 'a> {
- __lock: &'a StaticRwLock,
- __data: &'a UnsafeCell<T>,
- __poison: poison::Guard,
- __marker: marker::NoSend,
-}
-
-/// RAII structure used to release the exclusive write access of a lock when
-/// dropped.
-#[must_use]
-#[stable]
-#[cfg(not(stage0))] // NOTE remove cfg after next snapshot
pub struct RwLockWriteGuard<'a, T: 'a> {
__lock: &'a StaticRwLock,
__data: &'a UnsafeCell<T>,
__poison: poison::Guard,
}
-#[cfg(not(stage0))] // NOTE remove cfg after next snapshot
impl<'a, T> !marker::Send for RwLockWriteGuard<'a, T> {}
impl<T: Send + Sync> RwLock<T> {
}
impl<'rwlock, T> RwLockReadGuard<'rwlock, T> {
- #[cfg(stage0)] // NOTE remove impl after next snapshot
- fn new(lock: &'rwlock StaticRwLock, data: &'rwlock UnsafeCell<T>)
- -> LockResult<RwLockReadGuard<'rwlock, T>> {
- poison::map_result(lock.poison.borrow(), |_| {
- RwLockReadGuard {
- __lock: lock,
- __data: data,
- __marker: marker::NoSend,
- }
- })
- }
- #[cfg(not(stage0))] // NOTE remove cfg after next snapshot
fn new(lock: &'rwlock StaticRwLock, data: &'rwlock UnsafeCell<T>)
-> LockResult<RwLockReadGuard<'rwlock, T>> {
poison::map_result(lock.poison.borrow(), |_| {
}
}
impl<'rwlock, T> RwLockWriteGuard<'rwlock, T> {
- #[cfg(stage0)] // NOTE remove impl after next snapshot
- fn new(lock: &'rwlock StaticRwLock, data: &'rwlock UnsafeCell<T>)
- -> LockResult<RwLockWriteGuard<'rwlock, T>> {
- poison::map_result(lock.poison.borrow(), |guard| {
- RwLockWriteGuard {
- __lock: lock,
- __data: data,
- __poison: guard,
- __marker: marker::NoSend,
- }
- })
- }
- #[cfg(not(stage0))] // NOTE remove cfg after next snapshot
fn new(lock: &'rwlock StaticRwLock, data: &'rwlock UnsafeCell<T>)
-> LockResult<RwLockWriteGuard<'rwlock, T>> {
poison::map_result(lock.poison.borrow(), |guard| {
}
let mut info: Dl_info = unsafe { intrinsics::init() };
- if unsafe { dladdr(addr as *const libc::c_void, &mut info) == 0 } {
+ if unsafe { dladdr(addr, &mut info) == 0 } {
output(w, idx,addr, None)
} else {
output(w, idx, addr, Some(unsafe {
trace_argument: *mut libc::c_void)
-> _Unwind_Reason_Code;
- #[cfg(all(not(target_os = "android"),
+ #[cfg(all(not(all(target_os = "android", target_arch = "arm")),
not(all(target_os = "linux", target_arch = "arm"))))]
pub fn _Unwind_GetIP(ctx: *mut _Unwind_Context) -> libc::uintptr_t;
// On android, the function _Unwind_GetIP is a macro, and this is the
// expansion of the macro. This is all copy/pasted directly from the
// header file with the definition of _Unwind_GetIP.
- #[cfg(any(target_os = "android",
+ #[cfg(any(all(target_os = "android", target_arch = "arm"),
all(target_os = "linux", target_arch = "arm")))]
pub unsafe fn _Unwind_GetIP(ctx: *mut _Unwind_Context) -> libc::uintptr_t {
#[repr(C)]
let bytes = unsafe { ffi::c_str_to_bytes(&ptr) };
match str::from_utf8(bytes) {
Ok(s) => try!(demangle(w, s)),
- Err(..) => try!(w.write(&bytes[..(bytes.len()-1)])),
+ Err(..) => try!(w.write(&bytes[..bytes.len()-1])),
}
}
try!(w.write(&['\n' as u8]));
pub fn load_self() -> Option<Vec<u8>> {
unsafe {
fill_utf16_buf_and_decode(|buf, sz| {
- libc::GetModuleFileNameW(0u as libc::DWORD, buf, sz)
+ libc::GetModuleFileNameW(ptr::null_mut(), buf, sz)
}).map(|s| s.to_string().into_bytes())
}
}
RemoveTimer(libc::HANDLE, Sender<()>),
}
+unsafe impl Send for Timer {}
unsafe impl Send for Req {}
-
fn helper(input: libc::HANDLE, messages: Receiver<Req>, _: ()) {
let mut objs = vec![input];
let mut chans = vec![];
// destructor as running for this thread so calls to `get` will return
// `None`.
*(*ptr).dtor_running.get() = true;
- ptr::read((*ptr).inner.get() as *const T);
+ ptr::read((*ptr).inner.get());
}
}
impl Abi {
#[inline]
- pub fn index(&self) -> uint {
- *self as uint
+ pub fn index(&self) -> usize {
+ *self as usize
}
#[inline]
pub fn encode_with_hygiene(&self) -> String {
format!("\x00name_{},ctxt_{}\x00",
- self.name.uint(),
+ self.name.usize(),
self.ctxt)
}
}
/// A SyntaxContext represents a chain of macro-expandings
/// and renamings. Each macro expansion corresponds to
-/// a fresh uint
+/// a fresh usize
// I'm representing this syntax context as an index into
// a table, in order to work around a compiler bug
}
}
- pub fn uint(&self) -> uint {
+ pub fn usize(&self) -> usize {
let Name(nm) = *self;
- nm as uint
+ nm as usize
}
pub fn ident(&self) -> Ident {
ExprAssign(P<Expr>, P<Expr>),
ExprAssignOp(BinOp, P<Expr>, P<Expr>),
ExprField(P<Expr>, SpannedIdent),
- ExprTupField(P<Expr>, Spanned<uint>),
+ ExprTupField(P<Expr>, Spanned<usize>),
ExprIndex(P<Expr>, P<Expr>),
ExprRange(Option<P<Expr>>, Option<P<Expr>>),
/// Whether the sequence can be repeated zero (*), or one or more times (+)
pub op: KleeneOp,
/// The number of `MatchNt`s that appear in the sequence (and subsequences)
- pub num_captures: uint,
+ pub num_captures: usize,
}
/// A Kleene-style [repetition operator](http://en.wikipedia.org/wiki/Kleene_star)
}
impl TokenTree {
- pub fn len(&self) -> uint {
+ pub fn len(&self) -> usize {
match *self {
TtToken(_, token::DocComment(_)) => 2,
TtToken(_, token::SpecialVarNt(..)) => 2,
}
}
- pub fn get_tt(&self, index: uint) -> TokenTree {
+ pub fn get_tt(&self, index: usize) -> TokenTree {
match (self, index) {
(&TtToken(sp, token::DocComment(_)), 0) => {
TtToken(sp, token::Pound)
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show, Copy)]
pub enum StrStyle {
CookedStr,
- RawStr(uint)
+ RawStr(usize)
}
pub type Lit = Spanned<Lit_>;
}
impl LitIntType {
- pub fn suffix_len(&self) -> uint {
+ pub fn suffix_len(&self) -> usize {
match *self {
UnsuffixedIntLit(_) => 0,
SignedIntLit(s, _) => s.suffix_len(),
}
impl IntTy {
- pub fn suffix_len(&self) -> uint {
+ pub fn suffix_len(&self) -> usize {
match *self {
TyIs(true) /* i */ => 1,
TyIs(false) /* is */ | TyI8 => 2,
}
impl UintTy {
- pub fn suffix_len(&self) -> uint {
+ pub fn suffix_len(&self) -> usize {
match *self {
TyUs(true) /* u */ => 1,
TyUs(false) /* us */ | TyU8 => 2,
}
impl FloatTy {
- pub fn suffix_len(&self) -> uint {
+ pub fn suffix_len(&self) -> usize {
match *self {
TyF32 | TyF64 => 3, // add F128 handling here
}
TyPtr(MutTy),
/// A reference (`&'a T` or `&'a mut T`)
TyRptr(Option<Lifetime>, MutTy),
- /// A bare function (e.g. `fn(uint) -> bool`)
+ /// A bare function (e.g. `fn(usize) -> bool`)
TyBareFn(P<BareFnTy>),
/// A tuple (`(A, B, C, D,...)`)
TyTup(Vec<P<Ty>> ),
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show, Copy)]
-pub struct AttrId(pub uint);
+pub struct AttrId(pub usize);
/// Doc-comments are promoted to attributes that have is_sugared_doc = true
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Show)]
}
impl<'ast> Map<'ast> {
- fn entry_count(&self) -> uint {
+ fn entry_count(&self) -> usize {
self.map.borrow().len()
}
fn find_entry(&self, id: NodeId) -> Option<MapEntry<'ast>> {
- self.map.borrow().get(id as uint).map(|e| *e)
+ self.map.borrow().get(id as usize).map(|e| *e)
}
pub fn krate(&self) -> &'ast Crate {
NodesMatchingSuffix {
map: self,
item_name: parts.last().unwrap(),
- in_which: &parts[..(parts.len() - 1)],
+ in_which: &parts[..parts.len() - 1],
idx: 0,
}
}
fn next(&mut self) -> Option<NodeId> {
loop {
let idx = self.idx;
- if idx as uint >= self.map.entry_count() {
+ if idx as usize >= self.map.entry_count() {
return None;
}
self.idx += 1;
fn insert_entry(&mut self, id: NodeId, entry: MapEntry<'ast>) {
debug!("ast_map: {:?} => {:?}", id, entry);
let len = self.map.len();
- if id as uint >= len {
- self.map.extend(repeat(NotPresent).take(id as uint - len + 1));
+ if id as usize >= len {
+ self.map.extend(repeat(NotPresent).take(id as usize - len + 1));
}
- self.map[id as uint] = entry;
+ self.map[id as usize] = entry;
}
fn insert(&mut self, id: NodeId, node: Node<'ast>) {
}
/// Get a string representation of an unsigned int type, with its value.
-/// We want to avoid "42uint" in favor of "42u"
+/// We want to avoid "42u" in favor of "42us". "42uint" is right out.
pub fn uint_ty_to_string(t: UintTy, val: Option<u64>) -> String {
let s = match t {
TyUs(true) if val.is_some() => "u",
}
/// Maps a binary operator to its precedence
-pub fn operator_prec(op: ast::BinOp) -> uint {
+pub fn operator_prec(op: ast::BinOp) -> usize {
match op {
// 'as' sits here with 12
- BiMul | BiDiv | BiRem => 11u,
- BiAdd | BiSub => 10u,
- BiShl | BiShr => 9u,
- BiBitAnd => 8u,
- BiBitXor => 7u,
- BiBitOr => 6u,
- BiLt | BiLe | BiGe | BiGt | BiEq | BiNe => 3u,
- BiAnd => 2u,
- BiOr => 1u
+ BiMul | BiDiv | BiRem => 11us,
+ BiAdd | BiSub => 10us,
+ BiShl | BiShr => 9us,
+ BiBitAnd => 8us,
+ BiBitXor => 7us,
+ BiBitOr => 6us,
+ BiLt | BiLe | BiGe | BiGt | BiEq | BiNe => 3us,
+ BiAnd => 2us,
+ BiOr => 1us
}
}
/// Precedence of the `as` operator, which is a binary operator
/// not appearing in the prior table.
#[allow(non_upper_case_globals)]
-pub static as_prec: uint = 12u;
+pub static as_prec: usize = 12us;
pub fn empty_generics() -> Generics {
Generics {
P(dummy_spanned(MetaWord(name)))
}
-thread_local! { static NEXT_ATTR_ID: Cell<uint> = Cell::new(0) }
+thread_local! { static NEXT_ATTR_ID: Cell<usize> = Cell::new(0) }
pub fn mk_attr_id() -> AttrId {
let id = NEXT_ATTR_ID.with(|slot| {
use serialize::{Encodable, Decodable, Encoder, Decoder};
pub trait Pos {
- fn from_uint(n: uint) -> Self;
- fn to_uint(&self) -> uint;
+ fn from_usize(n: usize) -> Self;
+ fn to_usize(&self) -> usize;
}
/// A byte offset. Keep this small (currently 32-bits), as AST contains
/// is not equivalent to a character offset. The CodeMap will convert BytePos
/// values to CharPos values as necessary.
#[derive(Copy, PartialEq, Hash, PartialOrd, Show)]
-pub struct CharPos(pub uint);
+pub struct CharPos(pub usize);
// FIXME: Lots of boilerplate in these impls, but so far my attempts to fix
// have been unsuccessful
impl Pos for BytePos {
- fn from_uint(n: uint) -> BytePos { BytePos(n as u32) }
- fn to_uint(&self) -> uint { let BytePos(n) = *self; n as uint }
+ fn from_usize(n: usize) -> BytePos { BytePos(n as u32) }
+ fn to_usize(&self) -> usize { let BytePos(n) = *self; n as usize }
}
impl Add for BytePos {
type Output = BytePos;
fn add(self, rhs: BytePos) -> BytePos {
- BytePos((self.to_uint() + rhs.to_uint()) as u32)
+ BytePos((self.to_usize() + rhs.to_usize()) as u32)
}
}
type Output = BytePos;
fn sub(self, rhs: BytePos) -> BytePos {
- BytePos((self.to_uint() - rhs.to_uint()) as u32)
+ BytePos((self.to_usize() - rhs.to_usize()) as u32)
}
}
impl Pos for CharPos {
- fn from_uint(n: uint) -> CharPos { CharPos(n) }
- fn to_uint(&self) -> uint { let CharPos(n) = *self; n }
+ fn from_usize(n: usize) -> CharPos { CharPos(n) }
+ fn to_usize(&self) -> usize { let CharPos(n) = *self; n }
}
impl Add for CharPos {
type Output = CharPos;
fn add(self, rhs: CharPos) -> CharPos {
- CharPos(self.to_uint() + rhs.to_uint())
+ CharPos(self.to_usize() + rhs.to_usize())
}
}
type Output = CharPos;
fn sub(self, rhs: CharPos) -> CharPos {
- CharPos(self.to_uint() - rhs.to_uint())
+ CharPos(self.to_usize() - rhs.to_usize())
}
}
/// Information about the original source
pub file: Rc<FileMap>,
/// The (1-based) line number
- pub line: uint,
+ pub line: usize,
/// The (0-based) column offset
pub col: CharPos
}
// perhaps they should just be removed.
pub struct LocWithOpt {
pub filename: FileName,
- pub line: uint,
+ pub line: usize,
pub col: CharPos,
pub file: Option<Rc<FileMap>>,
}
// used to be structural records. Better names, anyone?
-pub struct FileMapAndLine { pub fm: Rc<FileMap>, pub line: uint }
+pub struct FileMapAndLine { pub fm: Rc<FileMap>, pub line: usize }
pub struct FileMapAndBytePos { pub fm: Rc<FileMap>, pub pos: BytePos }
/// The syntax with which a macro was invoked.
pub struct FileLines {
pub file: Rc<FileMap>,
- pub lines: Vec<uint>
+ pub lines: Vec<usize>
}
/// Identifies an offset of a multi-byte character in a FileMap
/// The absolute offset of the character in the CodeMap
pub pos: BytePos,
/// The number of bytes, >=2
- pub bytes: uint,
+ pub bytes: usize,
}
/// A single source in the CodeMap
/// get a line from the list of pre-computed line-beginnings
///
- pub fn get_line(&self, line_number: uint) -> Option<String> {
+ pub fn get_line(&self, line_number: usize) -> Option<String> {
let lines = self.lines.borrow();
lines.get(line_number).map(|&line| {
let begin: BytePos = line - self.start_pos;
- let begin = begin.to_uint();
+ let begin = begin.to_usize();
let slice = &self.src[begin..];
match slice.find('\n') {
Some(e) => &slice[..e],
})
}
- pub fn record_multibyte_char(&self, pos: BytePos, bytes: uint) {
+ pub fn record_multibyte_char(&self, pos: BytePos, bytes: usize) {
assert!(bytes >=2 && bytes <= 4);
let mbc = MultiByteChar {
pos: pos,
let mut files = self.files.borrow_mut();
let start_pos = match files.last() {
None => 0,
- Some(last) => last.start_pos.to_uint() + last.src.len(),
+ Some(last) => last.start_pos.to_usize() + last.src.len(),
};
// Remove utf-8 BOM if any.
let filemap = Rc::new(FileMap {
name: filename,
src: src.to_string(),
- start_pos: Pos::from_uint(start_pos),
+ start_pos: Pos::from_usize(start_pos),
lines: RefCell::new(Vec::new()),
multibyte_chars: RefCell::new(Vec::new()),
});
(format!("<{}:{}:{}>",
pos.file.name,
pos.line,
- pos.col.to_uint() + 1)).to_string()
+ pos.col.to_usize() + 1)).to_string()
}
/// Lookup source information about a BytePos
return (format!("{}:{}:{}: {}:{}",
lo.filename,
lo.line,
- lo.col.to_uint() + 1,
+ lo.col.to_usize() + 1,
hi.line,
- hi.col.to_uint() + 1)).to_string()
+ hi.col.to_usize() + 1)).to_string()
}
pub fn span_to_filename(&self, sp: Span) -> FileName {
let lo = self.lookup_char_pos(sp.lo);
let hi = self.lookup_char_pos(sp.hi);
let mut lines = Vec::new();
- for i in range(lo.line - 1u, hi.line as uint) {
+ for i in range(lo.line - 1us, hi.line as usize) {
lines.push(i);
};
FileLines {file: lo.file, lines: lines}
if begin.fm.start_pos != end.fm.start_pos {
None
} else {
- Some((&begin.fm.src[begin.pos.to_uint()..end.pos.to_uint()]).to_string())
+ Some((&begin.fm.src[begin.pos.to_usize()..end.pos.to_usize()]).to_string())
}
}
total_extra_bytes += mbc.bytes - 1;
// We should never see a byte position in the middle of a
// character
- assert!(bpos.to_uint() >= mbc.pos.to_uint() + mbc.bytes);
+ assert!(bpos.to_usize() >= mbc.pos.to_usize() + mbc.bytes);
} else {
break;
}
}
- assert!(map.start_pos.to_uint() + total_extra_bytes <= bpos.to_uint());
- CharPos(bpos.to_uint() - map.start_pos.to_uint() - total_extra_bytes)
+ assert!(map.start_pos.to_usize() + total_extra_bytes <= bpos.to_usize());
+ CharPos(bpos.to_usize() - map.start_pos.to_usize() - total_extra_bytes)
}
- fn lookup_filemap_idx(&self, pos: BytePos) -> uint {
+ fn lookup_filemap_idx(&self, pos: BytePos) -> usize {
let files = self.files.borrow();
let files = &*files;
let len = files.len();
- let mut a = 0u;
+ let mut a = 0us;
let mut b = len;
- while b - a > 1u {
- let m = (a + b) / 2u;
+ while b - a > 1us {
+ let m = (a + b) / 2us;
if files[m].start_pos > pos {
b = m;
} else {
}
if a == 0 {
panic!("position {} does not resolve to a source location",
- pos.to_uint());
+ pos.to_usize());
}
a -= 1;
}
if a >= len {
panic!("position {} does not resolve to a source location",
- pos.to_uint())
+ pos.to_usize())
}
return a;
let files = self.files.borrow();
let f = (*files)[idx].clone();
- let mut a = 0u;
+ let mut a = 0us;
{
let lines = f.lines.borrow();
let mut b = lines.len();
- while b - a > 1u {
- let m = (a + b) / 2u;
+ while b - a > 1us {
+ let m = (a + b) / 2us;
if (*lines)[m] > pos { b = m; } else { a = m; }
}
}
fn lookup_pos(&self, pos: BytePos) -> Loc {
let FileMapAndLine {fm: f, line: a} = self.lookup_line(pos);
- let line = a + 1u; // Line numbers start at 1
+ let line = a + 1us; // Line numbers start at 1
let chpos = self.bytepos_to_file_charpos(pos);
let linebpos = (*f.lines.borrow())[a];
let linechpos = self.bytepos_to_file_charpos(linebpos);
{
match id {
NO_EXPANSION => f(None),
- ExpnId(i) => f(Some(&(*self.expansions.borrow())[i as uint]))
+ ExpnId(i) => f(Some(&(*self.expansions.borrow())[i as usize]))
}
}
assert_eq!(file_lines.file.name, "blork.rs");
assert_eq!(file_lines.lines.len(), 1);
- assert_eq!(file_lines.lines[0], 1u);
+ assert_eq!(file_lines.lines[0], 1us);
}
#[test]
use term;
/// maximum number of lines we will print for each error; arbitrary.
-static MAX_LINES: uint = 6u;
+static MAX_LINES: usize = 6us;
#[derive(Clone, Copy)]
pub enum RenderSpan {
self.handler.emit(Some((&self.cm, sp)), msg, Fatal);
panic!(FatalError);
}
+ pub fn span_fatal_with_code(&self, sp: Span, msg: &str, code: &str) -> ! {
+ self.handler.emit_with_code(Some((&self.cm, sp)), msg, code, Fatal);
+ panic!(FatalError);
+ }
pub fn span_err(&self, sp: Span, msg: &str) {
self.handler.emit(Some((&self.cm, sp)), msg, Error);
self.handler.bump_err_count();
/// (fatal, bug, unimpl) may cause immediate exit,
/// others log errors for later reporting.
pub struct Handler {
- err_count: Cell<uint>,
+ err_count: Cell<usize>,
emit: RefCell<Box<Emitter + Send>>,
}
self.bump_err_count();
}
pub fn bump_err_count(&self) {
- self.err_count.set(self.err_count.get() + 1u);
+ self.err_count.set(self.err_count.get() + 1us);
}
- pub fn err_count(&self) -> uint {
+ pub fn err_count(&self) -> usize {
self.err_count.get()
}
pub fn has_errors(&self) -> bool {
- self.err_count.get()> 0u
+ self.err_count.get() > 0us
}
pub fn abort_if_errors(&self) {
let s;
match self.err_count.get() {
- 0u => return,
- 1u => s = "aborting due to previous error".to_string(),
- _ => {
+ 0us => return,
+ 1us => s = "aborting due to previous error".to_string(),
+ _ => {
s = format!("aborting due to {} previous errors",
self.err_count.get());
}
// to be miscolored. We assume this is rare enough that we don't
// have to worry about it.
if msg.ends_with("\n") {
- try!(t.write_str(&msg[..(msg.len()-1)]));
+ try!(t.write_str(&msg[..msg.len()-1]));
try!(t.reset());
try!(t.write_str("\n"));
} else {
let mut elided = false;
let mut display_lines = &lines.lines[];
if display_lines.len() > MAX_LINES {
- display_lines = &display_lines[0u..MAX_LINES];
+ display_lines = &display_lines[0us..MAX_LINES];
elided = true;
}
// Print the offending lines
}
}
if elided {
- let last_line = display_lines[display_lines.len() - 1u];
- let s = format!("{}:{} ", fm.name, last_line + 1u);
+ let last_line = display_lines[display_lines.len() - 1us];
+ let s = format!("{}:{} ", fm.name, last_line + 1us);
try!(write!(&mut err.dst, "{0:1$}...\n", "", s.len()));
}
// FIXME (#3260)
// If there's one line at fault we can easily point to the problem
- if lines.lines.len() == 1u {
+ if lines.lines.len() == 1us {
let lo = cm.lookup_char_pos(sp.lo);
- let mut digits = 0u;
- let mut num = (lines.lines[0] + 1u) / 10u;
+ let mut digits = 0us;
+ let mut num = (lines.lines[0] + 1us) / 10us;
// how many digits must be indent past?
- while num > 0u { num /= 10u; digits += 1u; }
+ while num > 0us { num /= 10us; digits += 1us; }
// indent past |name:## | and the 0-offset column location
- let left = fm.name.len() + digits + lo.col.to_uint() + 3u;
+ let left = fm.name.len() + digits + lo.col.to_usize() + 3us;
let mut s = String::new();
// Skip is the number of characters we need to skip because they are
// part of the 'filename:line ' part of the previous line.
- let skip = fm.name.len() + digits + 3u;
+ let skip = fm.name.len() + digits + 3us;
for _ in range(0, skip) {
s.push(' ');
}
if let Some(orig) = fm.get_line(lines.lines[0]) {
- for pos in range(0u, left - skip) {
+ for pos in range(0us, left - skip) {
let cur_char = orig.as_bytes()[pos] as char;
// Whenever a tab occurs on the previous line, we insert one on
// the error-point-squiggly-line as well (instead of a space).
let hi = cm.lookup_char_pos(sp.hi);
if hi.col != lo.col {
// the ^ already takes up one space
- let num_squigglies = hi.col.to_uint() - lo.col.to_uint() - 1u;
+ let num_squigglies = hi.col.to_usize() - lo.col.to_usize() - 1us;
for _ in range(0, num_squigglies) {
s.push('~');
}
let last_line_start = format!("{}:{} ", fm.name, lines[lines.len()-1]+1);
let hi = cm.lookup_char_pos(sp.hi);
// Span seems to use half-opened interval, so subtract 1
- let skip = last_line_start.len() + hi.col.to_uint() - 1;
+ let skip = last_line_start.len() + hi.col.to_usize() - 1;
let mut s = String::new();
for _ in range(0, skip) {
s.push(' ');
($code:tt) => (__register_diagnostic! { $code })
}
+#[macro_export]
+macro_rules! span_fatal {
+ ($session:expr, $span:expr, $code:ident, $($message:tt)*) => ({
+ __diagnostic_used!($code);
+ $session.span_fatal_with_code($span, format!($($message)*).as_slice(), stringify!($code))
+ })
+}
+
#[macro_export]
macro_rules! span_err {
($session:expr, $span:expr, $code:ident, $($message:tt)*) => ({
)
}
+#[macro_export]
+macro_rules! register_long_diagnostics {
+ ($($code:tt: $description:tt),*) => (
+ $(register_diagnostic! { $code, $description })*
+ )
+}
}
()
});
+ with_registered_diagnostics(|diagnostics| {
+ if !diagnostics.contains_key(&code.name) {
+ ecx.span_err(span, &format!(
+ "used diagnostic code {} not registered", token::get_ident(code).get()
+ )[]);
+ }
+ });
MacExpr::new(quote_expr!(ecx, ()))
}
pub exported_macros: Vec<ast::MacroDef>,
pub syntax_env: SyntaxEnv,
- pub recursion_count: uint,
+ pub recursion_count: usize,
}
impl<'a> ExtCtxt<'a> {
fn expr_mut_addr_of(&self, sp: Span, e: P<ast::Expr>) -> P<ast::Expr>;
fn expr_field_access(&self, span: Span, expr: P<ast::Expr>, ident: ast::Ident) -> P<ast::Expr>;
fn expr_tup_field_access(&self, sp: Span, expr: P<ast::Expr>,
- idx: uint) -> P<ast::Expr>;
+ idx: usize) -> P<ast::Expr>;
fn expr_call(&self, span: Span, expr: P<ast::Expr>, args: Vec<P<ast::Expr>>) -> P<ast::Expr>;
fn expr_call_ident(&self, span: Span, id: ast::Ident, args: Vec<P<ast::Expr>>) -> P<ast::Expr>;
fn expr_call_global(&self, sp: Span, fn_path: Vec<ast::Ident>,
fn expr_lit(&self, sp: Span, lit: ast::Lit_) -> P<ast::Expr>;
- fn expr_uint(&self, span: Span, i: uint) -> P<ast::Expr>;
- fn expr_int(&self, sp: Span, i: int) -> P<ast::Expr>;
+ fn expr_usize(&self, span: Span, i: usize) -> P<ast::Expr>;
+ fn expr_int(&self, sp: Span, i: isize) -> P<ast::Expr>;
fn expr_u8(&self, sp: Span, u: u8) -> P<ast::Expr>;
fn expr_bool(&self, sp: Span, value: bool) -> P<ast::Expr>;
fn expr_field_access(&self, sp: Span, expr: P<ast::Expr>, ident: ast::Ident) -> P<ast::Expr> {
let field_name = token::get_ident(ident);
let field_span = Span {
- lo: sp.lo - Pos::from_uint(field_name.get().len()),
+ lo: sp.lo - Pos::from_usize(field_name.get().len()),
hi: sp.hi,
expn_id: sp.expn_id,
};
let id = Spanned { node: ident, span: field_span };
self.expr(sp, ast::ExprField(expr, id))
}
- fn expr_tup_field_access(&self, sp: Span, expr: P<ast::Expr>, idx: uint) -> P<ast::Expr> {
+ fn expr_tup_field_access(&self, sp: Span, expr: P<ast::Expr>, idx: usize) -> P<ast::Expr> {
let field_span = Span {
- lo: sp.lo - Pos::from_uint(idx.to_string().len()),
+ lo: sp.lo - Pos::from_usize(idx.to_string().len()),
hi: sp.hi,
expn_id: sp.expn_id,
};
fn expr_lit(&self, sp: Span, lit: ast::Lit_) -> P<ast::Expr> {
self.expr(sp, ast::ExprLit(P(respan(sp, lit))))
}
- fn expr_uint(&self, span: Span, i: uint) -> P<ast::Expr> {
+ fn expr_usize(&self, span: Span, i: usize) -> P<ast::Expr> {
self.expr_lit(span, ast::LitInt(i as u64, ast::UnsignedIntLit(ast::TyUs(false))))
}
- fn expr_int(&self, sp: Span, i: int) -> P<ast::Expr> {
+ fn expr_int(&self, sp: Span, i: isize) -> P<ast::Expr> {
self.expr_lit(sp, ast::LitInt(i as u64, ast::SignedIntLit(ast::TyIs(false),
ast::Sign::new(i))))
}
let loc = self.codemap().lookup_char_pos(span.lo);
let expr_file = self.expr_str(span,
token::intern_and_get_ident(&loc.file.name[]));
- let expr_line = self.expr_uint(span, loc.line);
+ let expr_line = self.expr_usize(span, loc.line);
let expr_file_line_tuple = self.expr_tuple(span, vec!(expr_file, expr_line));
let expr_file_line_ptr = self.expr_addr_of(span, expr_file_line_tuple);
self.expr_call_global(
EnumNonMatchingCollapsed (..) => {
cx.span_bug(trait_span,
&format!("non-matching enum variants in \
- `deriving({})`", name)[])
+ `derive({})`", name)[])
}
StaticEnum(..) | StaticStruct(..) => {
cx.span_bug(trait_span,
- &format!("static method in `deriving({})`", name)[])
+ &format!("static method in `derive({})`", name)[])
}
}
None => {
cx.span_bug(trait_span,
&format!("unnamed field in normal struct in \
- `deriving({})`", name)[])
+ `derive({})`", name)[])
}
};
cx.field_imm(field.span, ident, subcall(field))
|cx, span, subexpr, self_f, other_fs| {
let other_f = match other_fs {
[ref o_f] => o_f,
- _ => cx.span_bug(span, "not exactly 2 arguments in `deriving(PartialEq)`")
+ _ => cx.span_bug(span, "not exactly 2 arguments in `derive(PartialEq)`")
};
let eq = cx.expr_binary(span, ast::BiEq, self_f, other_f.clone());
|cx, span, subexpr, self_f, other_fs| {
let other_f = match other_fs {
[ref o_f] => o_f,
- _ => cx.span_bug(span, "not exactly 2 arguments in `deriving(PartialEq)`")
+ _ => cx.span_bug(span, "not exactly 2 arguments in `derive(PartialEq)`")
};
let eq = cx.expr_binary(span, ast::BiNe, self_f, other_f.clone());
let new = {
let other_f = match other_fs {
[ref o_f] => o_f,
- _ => cx.span_bug(span, "not exactly 2 arguments in `deriving(PartialOrd)`"),
+ _ => cx.span_bug(span, "not exactly 2 arguments in `derive(PartialOrd)`"),
};
let args = vec![
equals_expr.clone(),
box |cx, span, (self_args, tag_tuple), _non_self_args| {
if self_args.len() != 2 {
- cx.span_bug(span, "not exactly 2 arguments in `deriving(PartialOrd)`")
+ cx.span_bug(span, "not exactly 2 arguments in `derive(PartialOrd)`")
} else {
some_ordering_collapsed(cx, span, PartialCmpOp, tag_tuple)
}
*/
let other_f = match other_fs {
[ref o_f] => o_f,
- _ => cx.span_bug(span, "not exactly 2 arguments in `deriving(PartialOrd)`")
+ _ => cx.span_bug(span, "not exactly 2 arguments in `derive(PartialOrd)`")
};
let cmp = cx.expr_binary(span, op, self_f.clone(), other_f.clone());
cx.expr_bool(span, equal),
box |cx, span, (self_args, tag_tuple), _non_self_args| {
if self_args.len() != 2 {
- cx.span_bug(span, "not exactly 2 arguments in `deriving(PartialOrd)`")
+ cx.span_bug(span, "not exactly 2 arguments in `derive(PartialOrd)`")
} else {
let op = match (less, equal) {
(true, true) => LeOp, (true, false) => LtOp,
let block = cx.block(span, stmts, None);
cx.expr_block(block)
},
- box |cx, sp, _, _| cx.span_bug(sp, "non matching enums in deriving(Eq)?"),
+ box |cx, sp, _, _| cx.span_bug(sp, "non matching enums in derive(Eq)?"),
cx,
span,
substr)
let new = {
let other_f = match other_fs {
[ref o_f] => o_f,
- _ => cx.span_bug(span, "not exactly 2 arguments in `deriving(PartialOrd)`"),
+ _ => cx.span_bug(span, "not exactly 2 arguments in `derive(PartialOrd)`"),
};
let args = vec![
cx.expr_path(equals_path.clone()),
box |cx, span, (self_args, tag_tuple), _non_self_args| {
if self_args.len() != 2 {
- cx.span_bug(span, "not exactly 2 arguments in `deriving(Ord)`")
+ cx.span_bug(span, "not exactly 2 arguments in `derive(Ord)`")
} else {
ordering_collapsed(cx, span, tag_tuple)
}
cx.expr_try(span,
cx.expr_method_call(span, blkdecoder.clone(), read_struct_field,
vec!(cx.expr_str(span, name),
- cx.expr_uint(span, field),
+ cx.expr_usize(span, field),
exprdecode.clone())))
});
let result = cx.expr_ok(trait_span, result);
cx.ident_of("read_struct"),
vec!(
cx.expr_str(trait_span, token::get_ident(substr.type_ident)),
- cx.expr_uint(trait_span, nfields),
+ cx.expr_usize(trait_span, nfields),
cx.lambda_expr_1(trait_span, result, blkarg)
))
}
path,
parts,
|cx, span, _, field| {
- let idx = cx.expr_uint(span, field);
+ let idx = cx.expr_usize(span, field);
cx.expr_try(span,
cx.expr_method_call(span, blkdecoder.clone(), rvariant_arg,
vec!(idx, exprdecode.clone())))
});
arms.push(cx.arm(v_span,
- vec!(cx.pat_lit(v_span, cx.expr_uint(v_span, i))),
+ vec!(cx.pat_lit(v_span, cx.expr_usize(v_span, i))),
decoded));
}
cx.lambda_expr_1(trait_span, result, blkarg)
))
}
- _ => cx.bug("expected StaticEnum or StaticStruct in deriving(Decodable)")
+ _ => cx.bug("expected StaticEnum or StaticStruct in derive(Decodable)")
};
}
/// Create a decoder for a single enum variant/struct:
/// - `outer_pat_path` is the path to this enum variant/struct
-/// - `getarg` should retrieve the `uint`-th field with name `@str`.
+/// - `getarg` should retrieve the `usize`-th field with name `@str`.
fn decode_static_fields<F>(cx: &mut ExtCtxt,
trait_span: Span,
outer_pat_path: ast::Path,
fields: &StaticFields,
mut getarg: F)
-> P<Expr> where
- F: FnMut(&mut ExtCtxt, Span, InternedString, uint) -> P<Expr>,
+ F: FnMut(&mut ExtCtxt, Span, InternedString, usize) -> P<Expr>,
{
match *fields {
Unnamed(ref fields) => {
StaticEnum(..) => {
cx.span_err(trait_span, "`Default` cannot be derived for enums, only structs");
// let compilation continue
- cx.expr_uint(trait_span, 0)
+ cx.expr_usize(trait_span, 0)
}
- _ => cx.span_bug(trait_span, "Non-static method in `deriving(Default)`")
+ _ => cx.span_bug(trait_span, "Non-static method in `derive(Default)`")
};
}
//!
//! ```ignore
//! #[derive(Encodable, Decodable)]
-//! struct Node { id: uint }
+//! struct Node { id: usize }
//! ```
//!
//! would generate two implementations like:
//! s.emit_struct("Node", 1, |this| {
//! this.emit_struct_field("id", 0, |this| {
//! Encodable::encode(&self.id, this)
-//! /* this.emit_uint(self.id) can also be used */
+//! /* this.emit_usize(self.id) can also be used */
//! })
//! })
//! }
let call = cx.expr_method_call(span, blkencoder.clone(),
emit_struct_field,
vec!(cx.expr_str(span, name),
- cx.expr_uint(span, i),
+ cx.expr_usize(span, i),
lambda));
// last call doesn't need a try!
cx.ident_of("emit_struct"),
vec!(
cx.expr_str(trait_span, token::get_ident(substr.type_ident)),
- cx.expr_uint(trait_span, fields.len()),
+ cx.expr_usize(trait_span, fields.len()),
blk
))
}
let lambda = cx.lambda_expr_1(span, enc, blkarg);
let call = cx.expr_method_call(span, blkencoder.clone(),
emit_variant_arg,
- vec!(cx.expr_uint(span, i),
+ vec!(cx.expr_usize(span, i),
lambda));
let call = if i != last {
cx.expr_try(span, call)
let call = cx.expr_method_call(trait_span, blkencoder,
cx.ident_of("emit_enum_variant"),
vec!(name,
- cx.expr_uint(trait_span, idx),
- cx.expr_uint(trait_span, fields.len()),
+ cx.expr_usize(trait_span, idx),
+ cx.expr_usize(trait_span, fields.len()),
blk));
let blk = cx.lambda_expr_1(trait_span, call, blkarg);
let ret = cx.expr_method_call(trait_span,
cx.expr_block(cx.block(trait_span, vec!(me), Some(ret)))
}
- _ => cx.bug("expected Struct or EnumMatching in deriving(Encodable)")
+ _ => cx.bug("expected Struct or EnumMatching in derive(Encodable)")
};
}
//! arguments:
//!
//! - `Struct`, when `Self` is a struct (including tuple structs, e.g
-//! `struct T(int, char)`).
+//! `struct T(i32, char)`).
//! - `EnumMatching`, when `Self` is an enum and all the arguments are the
//! same variant of the enum (e.g. `Some(1)`, `Some(3)` and `Some(4)`)
//! - `EnumNonMatchingCollapsed` when `Self` is an enum and the arguments
//! following snippet
//!
//! ```rust
-//! struct A { x : int }
+//! struct A { x : i32 }
//!
-//! struct B(int);
+//! struct B(i32);
//!
//! enum C {
-//! C0(int),
-//! C1 { x: int }
+//! C0(i32),
+//! C1 { x: i32 }
//! }
//! ```
//!
-//! The `int`s in `B` and `C0` don't have an identifier, so the
+//! The `i32`s in `B` and `C0` don't have an identifier, so the
//! `Option<ident>`s would be `None` for them.
//!
//! In the static cases, the structure is summarised, either into the just
//! trait PartialEq {
//! fn eq(&self, other: &Self);
//! }
-//! impl PartialEq for int {
-//! fn eq(&self, other: &int) -> bool {
+//! impl PartialEq for i32 {
+//! fn eq(&self, other: &i32) -> bool {
//! *self == *other
//! }
//! }
//!
//! ```{.text}
//! Struct(vec![FieldInfo {
-//! span: <span of `int`>,
+//! span: <span of `i32`>,
//! name: None,
//! self_: <expr for &a>
//! other: vec![<expr for &b>]
//! ```{.text}
//! EnumMatching(0, <ast::Variant for C0>,
//! vec![FieldInfo {
-//! span: <span of int>
+//! span: <span of i32>
//! name: None,
//! self_: <expr for &a>,
//! other: vec![<expr for &b>]
//! StaticStruct(<ast::StructDef of B>, Unnamed(vec![<span of x>]))
//!
//! StaticEnum(<ast::EnumDef of C>,
-//! vec![(<ident of C0>, <span of C0>, Unnamed(vec![<span of int>])),
+//! vec![(<ident of C0>, <span of C0>, Unnamed(vec![<span of i32>])),
//! (<ident of C1>, <span of C1>, Named(vec![(<ident of x>, <span of x>)]))])
//! ```
/// Matching variants of the enum: variant index, ast::Variant,
/// fields: the field name is only non-`None` in the case of a struct
/// variant.
- EnumMatching(uint, &'a ast::Variant, Vec<FieldInfo>),
+ EnumMatching(usize, &'a ast::Variant, Vec<FieldInfo>),
/// Non-matching variants of the enum, but with all state hidden from
/// the consequent code. The first component holds `Ident`s for all of
/// ```
/// #[derive(PartialEq)]
- /// struct A { x: int, y: int }
+ /// struct A { x: i32, y: i32 }
///
/// // equivalent to:
/// impl PartialEq for A {
let mut raw_fields = Vec::new(); // ~[[fields of self],
// [fields of next Self arg], [etc]]
let mut patterns = Vec::new();
- for i in range(0u, self_args.len()) {
+ for i in range(0us, self_args.len()) {
let struct_path= cx.path(DUMMY_SP, vec!( type_ident ));
let (pat, ident_expr) =
trait_.create_struct_pattern(cx,
/// #[derive(PartialEq)]
/// enum A {
/// A1,
- /// A2(int)
+ /// A2(i32)
/// }
///
/// // is equivalent to
/// (&A2(ref __self_0),
/// &A2(ref __arg_1_0)) => (*__self_0).eq(&(*__arg_1_0)),
/// _ => {
- /// let __self_vi = match *self { A1(..) => 0u, A2(..) => 1u };
- /// let __arg_1_vi = match *__arg_1 { A1(..) => 0u, A2(..) => 1u };
+ /// let __self_vi = match *self { A1(..) => 0us, A2(..) => 1us };
+ /// let __arg_1_vi = match *__arg_1 { A1(..) => 0us, A2(..) => 1us };
/// false
/// }
/// }
/// (Variant2, Variant2, Variant2) => ... // delegate Matching on Variant2
/// ...
/// _ => {
- /// let __this_vi = match this { Variant1 => 0u, Variant2 => 1u, ... };
- /// let __that_vi = match that { Variant1 => 0u, Variant2 => 1u, ... };
+ /// let __this_vi = match this { Variant1 => 0us, Variant2 => 1us, ... };
+ /// let __that_vi = match that { Variant1 => 0us, Variant2 => 1us, ... };
/// ... // catch-all remainder can inspect above variant index values.
/// }
/// }
.collect::<Vec<ast::Ident>>();
// The `vi_idents` will be bound, solely in the catch-all, to
- // a series of let statements mapping each self_arg to a uint
+ // a series of let statements mapping each self_arg to a usize
// corresponding to its variant index.
let vi_idents: Vec<ast::Ident> = self_arg_names.iter()
.map(|name| { let vi_suffix = format!("{}_vi", &name[]);
}).collect();
// Build a series of let statements mapping each self_arg
- // to a uint corresponding to its variant index.
+ // to a usize corresponding to its variant index.
// i.e. for `enum E<T> { A, B(1), C(T, T) }`, and a deriving
// with three Self args, builds three statements:
//
// ```
// let __self0_vi = match self {
- // A => 0u, B(..) => 1u, C(..) => 2u
+ // A => 0us, B(..) => 1us, C(..) => 2us
// };
// let __self1_vi = match __arg1 {
- // A => 0u, B(..) => 1u, C(..) => 2u
+ // A => 0us, B(..) => 1us, C(..) => 2us
// };
// let __self2_vi = match __arg2 {
- // A => 0u, B(..) => 1u, C(..) => 2u
+ // A => 0us, B(..) => 1us, C(..) => 2us
// };
// ```
let mut index_let_stmts: Vec<P<ast::Stmt>> = Vec::new();
to_set.expn_id = cx.codemap().record_expansion(codemap::ExpnInfo {
call_site: to_set,
callee: codemap::NameAndSpan {
- name: format!("deriving({})", trait_name),
+ name: format!("derive({})", trait_name),
format: codemap::MacroAttribute,
span: Some(self.span)
}
Raw(ast::Mutability),
}
-/// A path, e.g. `::std::option::Option::<int>` (global). Has support
+/// A path, e.g. `::std::option::Option::<i32>` (global). Has support
/// for type parameters and a lifetime.
#[derive(Clone)]
pub struct Path<'a> {
/// &/Box/ Ty
Ptr(Box<Ty<'a>>, PtrTy<'a>),
/// mod::mod::Type<[lifetime], [Params...]>, including a plain type
- /// parameter, and things like `int`
+ /// parameter, and things like `i32`
Literal(Path<'a>),
/// includes unit
Tuple(Vec<Ty<'a>> )
Literal(ref p) => {
p.to_path(cx, span, self_ty, self_generics)
}
- Ptr(..) => { cx.span_bug(span, "pointer in a path in generic `deriving`") }
- Tuple(..) => { cx.span_bug(span, "tuple in a path in generic `deriving`") }
+ Ptr(..) => { cx.span_bug(span, "pointer in a path in generic `derive`") }
+ Tuple(..) => { cx.span_bug(span, "tuple in a path in generic `derive`") }
}
}
}
// iteration function.
let discriminant = match variant.node.disr_expr {
Some(ref d) => d.clone(),
- None => cx.expr_uint(trait_span, index)
+ None => cx.expr_usize(trait_span, index)
};
stmts.push(call_hash(trait_span, discriminant));
fn cs_from(name: &str, cx: &mut ExtCtxt, trait_span: Span, substr: &Substructure) -> P<Expr> {
let n = match substr.nonself_args {
[ref n] => n,
- _ => cx.span_bug(trait_span, "incorrect number of arguments in `deriving(FromPrimitive)`")
+ _ => cx.span_bug(trait_span, "incorrect number of arguments in `derive(FromPrimitive)`")
};
match *substr.fields {
cx.expr_match(trait_span, n.clone(), arms)
}
- _ => cx.span_bug(trait_span, "expected StaticEnum in deriving(FromPrimitive)")
+ _ => cx.span_bug(trait_span, "expected StaticEnum in derive(FromPrimitive)")
}
}
fn rand_substructure(cx: &mut ExtCtxt, trait_span: Span, substr: &Substructure) -> P<Expr> {
let rng = match substr.nonself_args {
[ref rng] => rng,
- _ => cx.bug("Incorrect number of arguments to `rand` in `deriving(Rand)`")
+ _ => cx.bug("Incorrect number of arguments to `rand` in `derive(Rand)`")
};
let rand_ident = vec!(
cx.ident_of("std"),
if variants.is_empty() {
cx.span_err(trait_span, "`Rand` cannot be derived for enums with no variants");
// let compilation continue
- return cx.expr_uint(trait_span, 0);
+ return cx.expr_usize(trait_span, 0);
}
- let variant_count = cx.expr_uint(trait_span, variants.len());
+ let variant_count = cx.expr_usize(trait_span, variants.len());
let rand_name = cx.path_all(trait_span,
true,
variant_count);
let mut arms = variants.iter().enumerate().map(|(i, &(ident, v_span, ref summary))| {
- let i_expr = cx.expr_uint(v_span, i);
+ let i_expr = cx.expr_usize(v_span, i);
let pat = cx.pat_lit(v_span, i_expr);
let path = cx.path(v_span, vec![substr.type_ident, ident]);
let block = cx.block(trait_span, vec!( let_statement ), Some(match_expr));
cx.expr_block(block)
}
- _ => cx.bug("Non-static method in `deriving(Rand)`")
+ _ => cx.bug("Non-static method in `derive(Rand)`")
};
fn rand_thing<F>(cx: &mut ExtCtxt,
let e = match os::getenv(var.get()) {
None => {
cx.span_err(sp, msg.get());
- cx.expr_uint(sp, 0)
+ cx.expr_usize(sp, 0)
}
Some(s) => cx.expr_str(sp, token::intern_and_get_ident(&s[]))
};
// in this file.
// Token-tree macros:
MacInvocTT(pth, tts, _) => {
- if pth.segments.len() > 1u {
+ if pth.segments.len() > 1us {
fld.cx.span_err(pth.span,
"expected macro name without module \
separators");
},
_ => unreachable!()
};
- if pth.segments.len() > 1u {
+ if pth.segments.len() > 1us {
fld.cx.span_err(pth.span, "expected macro name without module separators");
return DummyResult::raw_pat(span);
}
pub struct ExpansionConfig {
pub crate_name: String,
pub enable_quotes: bool,
- pub recursion_limit: uint,
+ pub recursion_limit: usize,
}
impl ExpansionConfig {
#[should_fail]
#[test] fn macros_cant_escape_fns_test () {
let src = "fn bogus() {macro_rules! z (() => (3+4));}\
- fn inty() -> int { z!() }".to_string();
+ fn inty() -> i32 { z!() }".to_string();
let sess = parse::new_parse_sess();
let crate_ast = parse::parse_crate_from_source_str(
"<test>".to_string(),
#[should_fail]
#[test] fn macros_cant_escape_mods_test () {
let src = "mod foo {macro_rules! z (() => (3+4));}\
- fn inty() -> int { z!() }".to_string();
+ fn inty() -> i32 { z!() }".to_string();
let sess = parse::new_parse_sess();
let crate_ast = parse::parse_crate_from_source_str(
"<test>".to_string(),
// macro_use modules should allow macros to escape
#[test] fn macros_can_escape_flattened_mods_test () {
let src = "#[macro_use] mod foo {macro_rules! z (() => (3+4));}\
- fn inty() -> int { z!() }".to_string();
+ fn inty() -> i32 { z!() }".to_string();
let sess = parse::new_parse_sess();
let crate_ast = parse::parse_crate_from_source_str(
"<test>".to_string(),
// should be able to use a bound identifier as a literal in a macro definition:
#[test] fn self_macro_parsing(){
expand_crate_str(
- "macro_rules! foo ((zz) => (287u;));
- fn f(zz : int) {foo!(zz);}".to_string()
+ "macro_rules! foo ((zz) => (287;));
+ fn f(zz: i32) {foo!(zz);}".to_string()
);
}
// in principle, you might want to control this boolean on a per-varref basis,
// but that would make things even harder to understand, and might not be
// necessary for thorough testing.
- type RenamingTest = (&'static str, Vec<Vec<uint>>, bool);
+ type RenamingTest = (&'static str, Vec<Vec<usize>>, bool);
#[test]
fn automatic_renaming () {
let tests: Vec<RenamingTest> =
vec!(// b & c should get new names throughout, in the expr too:
- ("fn a() -> int { let b = 13; let c = b; b+c }",
+ ("fn a() -> i32 { let b = 13; let c = b; b+c }",
vec!(vec!(0,1),vec!(2)), false),
// both x's should be renamed (how is this causing a bug?)
- ("fn main () {let x: int = 13;x;}",
+ ("fn main () {let x: i32 = 13;x;}",
vec!(vec!(0)), false),
// the use of b after the + should be renamed, the other one not:
- ("macro_rules! f (($x:ident) => (b + $x)); fn a() -> int { let b = 13; f!(b)}",
+ ("macro_rules! f (($x:ident) => (b + $x)); fn a() -> i32 { let b = 13; f!(b)}",
vec!(vec!(1)), false),
// the b before the plus should not be renamed (requires marks)
- ("macro_rules! f (($x:ident) => ({let b=9; ($x + b)})); fn a() -> int { f!(b)}",
+ ("macro_rules! f (($x:ident) => ({let b=9; ($x + b)})); fn a() -> i32 { f!(b)}",
vec!(vec!(1)), false),
// the marks going in and out of letty should cancel, allowing that $x to
// capture the one following the semicolon.
// this was an awesome test case, and caught a *lot* of bugs.
("macro_rules! letty(($x:ident) => (let $x = 15;));
macro_rules! user(($x:ident) => ({letty!($x); $x}));
- fn main() -> int {user!(z)}",
+ fn main() -> i32 {user!(z)}",
vec!(vec!(0)), false)
);
for (idx,s) in tests.iter().enumerate() {
// can't write this test case until we have macro-generating macros.
// method arg hygiene
- // method expands to fn get_x(&self_0, x_1:int) {self_0 + self_2 + x_3 + x_1}
+ // method expands to fn get_x(&self_0, x_1: i32) {self_0 + self_2 + x_3 + x_1}
#[test] fn method_arg_hygiene(){
run_renaming_test(
&("macro_rules! inject_x (()=>(x));
macro_rules! inject_self (()=>(self));
struct A;
- impl A{fn get_x(&self, x: int) {self + inject_self!() + inject_x!() + x;} }",
+ impl A{fn get_x(&self, x: i32) {self + inject_self!() + inject_x!() + x;} }",
vec!(vec!(0),vec!(3)),
true),
0)
}
// item fn hygiene
- // expands to fn q(x_1:int){fn g(x_2:int){x_2 + x_1};}
+ // expands to fn q(x_1: i32){fn g(x_2: i32){x_2 + x_1};}
#[test] fn issue_9383(){
run_renaming_test(
- &("macro_rules! bad_macro (($ex:expr) => (fn g(x:int){ x + $ex }));
- fn q(x:int) { bad_macro!(x); }",
+ &("macro_rules! bad_macro (($ex:expr) => (fn g(x: i32){ x + $ex }));
+ fn q(x: i32) { bad_macro!(x); }",
vec!(vec!(1),vec!(0)),true),
0)
}
// closure arg hygiene (ExprClosure)
- // expands to fn f(){(|x_1 : int| {(x_2 + x_1)})(3);}
+ // expands to fn f(){(|x_1 : i32| {(x_2 + x_1)})(3);}
#[test] fn closure_arg_hygiene(){
run_renaming_test(
&("macro_rules! inject_x (()=>(x));
- fn f(){(|x : int| {(inject_x!() + x)})(3);}",
+ fn f(){(|x : i32| {(inject_x!() + x)})(3);}",
vec!(vec!(1)),
true),
0)
// macro_rules in method position. Sadly, unimplemented.
#[test] fn macro_in_method_posn(){
expand_crate_str(
- "macro_rules! my_method (() => (fn thirteen(&self) -> int {13}));
+ "macro_rules! my_method (() => (fn thirteen(&self) -> i32 {13}));
struct A;
impl A{ my_method!(); }
fn f(){A.thirteen;}".to_string());
}
// run one of the renaming tests
- fn run_renaming_test(t: &RenamingTest, test_idx: uint) {
+ fn run_renaming_test(t: &RenamingTest, test_idx: usize) {
let invalid_name = token::special_idents::invalid.name;
let (teststr, bound_connections, bound_ident_check) = match *t {
(ref str,ref conns, bic) => (str.to_string(), conns.clone(), bic)
// it's the name of a 0-ary variant, and that 'i' appears twice in succession.
#[test]
fn crate_bindings_test(){
- let the_crate = string_to_crate("fn main (a : int) -> int {|b| {
+ let the_crate = string_to_crate("fn main (a: i32) -> i32 {|b| {
match 34 {None => 3, Some(i) | i => j, Foo{k:z,l:y} => \"banana\"}} }".to_string());
let idents = crate_bindings(&the_crate);
assert_eq!(idents, strs_to_idents(vec!("a","b","None","i","i","z","y")));
// test the IdentRenamer directly
#[test]
fn ident_renamer_test () {
- let the_crate = string_to_crate("fn f(x : int){let x = x; x}".to_string());
+ let the_crate = string_to_crate("fn f(x: i32){let x = x; x}".to_string());
let f_ident = token::str_to_ident("f");
let x_ident = token::str_to_ident("x");
- let int_ident = token::str_to_ident("int");
+ let int_ident = token::str_to_ident("i32");
let renames = vec!((x_ident,Name(16)));
let mut renamer = IdentRenamer{renames: &renames};
let renamed_crate = renamer.fold_crate(the_crate);
// test the PatIdentRenamer; only PatIdents get renamed
#[test]
fn pat_ident_renamer_test () {
- let the_crate = string_to_crate("fn f(x : int){let x = x; x}".to_string());
+ let the_crate = string_to_crate("fn f(x: i32){let x = x; x}".to_string());
let f_ident = token::str_to_ident("f");
let x_ident = token::str_to_ident("x");
- let int_ident = token::str_to_ident("int");
+ let int_ident = token::str_to_ident("i32");
let renames = vec!((x_ident,Name(16)));
let mut renamer = PatIdentRenamer{renames: &renames};
let renamed_crate = renamer.fold_crate(the_crate);
}
enum Position {
- Exact(uint),
+ Exact(usize),
Named(String),
}
/// Stays `true` if all formatting parameters are default (as in "{}{}").
all_pieces_simple: bool,
- name_positions: HashMap<String, uint>,
+ name_positions: HashMap<String, usize>,
/// Updated as arguments are consumed or methods are entered
- nest_level: uint,
- next_arg: uint,
+ nest_level: usize,
+ next_arg: usize,
}
/// Parses the arguments from the given list of tokens, returning None
match c {
parse::CountIs(i) => {
self.ecx.expr_call_global(sp, Context::rtpath(self.ecx, "CountIs"),
- vec!(self.ecx.expr_uint(sp, i)))
+ vec!(self.ecx.expr_usize(sp, i)))
}
parse::CountIsParam(i) => {
self.ecx.expr_call_global(sp, Context::rtpath(self.ecx, "CountIsParam"),
- vec!(self.ecx.expr_uint(sp, i)))
+ vec!(self.ecx.expr_usize(sp, i)))
}
parse::CountImplied => {
let path = self.ecx.path_global(sp, Context::rtpath(self.ecx,
};
let i = i + self.args.len();
self.ecx.expr_call_global(sp, Context::rtpath(self.ecx, "CountIsParam"),
- vec!(self.ecx.expr_uint(sp, i)))
+ vec!(self.ecx.expr_usize(sp, i)))
}
}
}
}
parse::ArgumentIs(i) => {
self.ecx.expr_call_global(sp, Context::rtpath(self.ecx, "ArgumentIs"),
- vec!(self.ecx.expr_uint(sp, i)))
+ vec!(self.ecx.expr_usize(sp, i)))
}
// Named arguments are converted to positional arguments at
// the end of the list of arguments
};
let i = i + self.args.len();
self.ecx.expr_call_global(sp, Context::rtpath(self.ecx, "ArgumentIs"),
- vec!(self.ecx.expr_uint(sp, i)))
+ vec!(self.ecx.expr_usize(sp, i)))
}
};
}
};
let align = self.ecx.expr_path(align);
- let flags = self.ecx.expr_uint(sp, arg.format.flags);
+ let flags = self.ecx.expr_usize(sp, arg.format.flags);
let prec = self.trans_count(arg.format.precision);
let width = self.trans_count(arg.format.width);
let path = self.ecx.path_global(sp, Context::rtpath(self.ecx, "FormatSpec"));
}
let resolved = {
- let result = (*table.table.borrow())[id.ctxt as uint];
+ let result = (*table.table.borrow())[id.ctxt as usize];
match result {
EmptyCtxt => id.name,
// ignore marks here:
let mut result = Vec::new();
let mut loopvar = ctxt;
loop {
- let table_entry = (*table.table.borrow())[loopvar as uint];
+ let table_entry = (*table.table.borrow())[loopvar as usize];
match table_entry {
EmptyCtxt => {
return result;
/// FAILS when outside is not a mark.
pub fn outer_mark(ctxt: SyntaxContext) -> Mrk {
with_sctable(|sctable| {
- match (*sctable.table.borrow())[ctxt as uint] {
+ match (*sctable.table.borrow())[ctxt as usize] {
Mark(mrk, _) => mrk,
_ => panic!("can't retrieve outer mark when outside is not a mark")
}
let mut result = Vec::new();
loop {
let table = table.table.borrow();
- match (*table)[sc as uint] {
+ match (*table)[sc as usize] {
EmptyCtxt => {return result;},
Mark(mrk,tail) => {
result.push(M(mrk));
assert_eq! (marksof_internal (ans, stopname,&t), vec!(16));}
// rename where stop doesn't match:
{ let chain = vec!(M(9),
- R(id(name1.uint() as u32,
+ R(id(name1.usize() as u32,
apply_mark_internal (4, EMPTY_CTXT,&mut t)),
Name(100101102)),
M(14));
// rename where stop does match
{ let name1sc = apply_mark_internal(4, EMPTY_CTXT, &mut t);
let chain = vec!(M(9),
- R(id(name1.uint() as u32, name1sc),
+ R(id(name1.usize() as u32, name1sc),
stopname),
M(14));
let ans = unfold_test_sc(chain,EMPTY_CTXT,&mut t);
}
token::Literal(token::StrRaw(ident, n), suf) => {
- return mk_lit!("StrRaw", suf, mk_name(cx, sp, ident.ident()), cx.expr_uint(sp, n))
+ return mk_lit!("StrRaw", suf, mk_name(cx, sp, ident.ident()), cx.expr_usize(sp, n))
}
token::Ident(ident, style) => {
// try removing it when enough of them are gone.
let mut p = cx.new_parser_from_tts(tts);
- p.quote_depth += 1u;
+ p.quote_depth += 1us;
let cx_expr = p.parse_expr();
if !p.eat(&token::Comma) {
let topmost = cx.original_span_in_file();
let loc = cx.codemap().lookup_char_pos(topmost.lo);
- base::MacExpr::new(cx.expr_uint(topmost, loc.line))
+ base::MacExpr::new(cx.expr_usize(topmost, loc.line))
}
/* column!(): expands to the current column number */
let topmost = cx.original_span_in_file();
let loc = cx.codemap().lookup_char_pos(topmost.lo);
- base::MacExpr::new(cx.expr_uint(topmost, loc.col.to_uint()))
+ base::MacExpr::new(cx.expr_usize(topmost, loc.col.to_usize()))
}
/// file!(): expands to the current filename */
}
impl TokenTreeOrTokenTreeVec {
- fn len(&self) -> uint {
+ fn len(&self) -> usize {
match self {
&TtSeq(ref v) => v.len(),
&Tt(ref tt) => tt.len(),
}
}
- fn get_tt(&self, index: uint) -> TokenTree {
+ fn get_tt(&self, index: usize) -> TokenTree {
match self {
&TtSeq(ref v) => v[index].clone(),
&Tt(ref tt) => tt.get_tt(index),
#[derive(Clone)]
struct MatcherTtFrame {
elts: TokenTreeOrTokenTreeVec,
- idx: uint,
+ idx: usize,
}
#[derive(Clone)]
stack: Vec<MatcherTtFrame>,
top_elts: TokenTreeOrTokenTreeVec,
sep: Option<Token>,
- idx: uint,
+ idx: usize,
up: Option<Box<MatcherPos>>,
matches: Vec<Vec<Rc<NamedMatch>>>,
- match_lo: uint,
- match_cur: uint,
- match_hi: uint,
+ match_lo: usize,
+ match_cur: usize,
+ match_hi: usize,
sp_lo: BytePos,
}
-pub fn count_names(ms: &[TokenTree]) -> uint {
+pub fn count_names(ms: &[TokenTree]) -> usize {
ms.iter().fold(0, |count, elt| {
count + match elt {
&TtSequence(_, ref seq) => {
stack: vec![],
top_elts: TtSeq(ms),
sep: sep,
- idx: 0u,
+ idx: 0us,
up: None,
matches: matches,
- match_lo: 0u,
- match_cur: 0u,
+ match_lo: 0us,
+ match_cur: 0us,
match_hi: match_idx_hi,
sp_lo: lo
}
pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc<NamedMatch>])
-> HashMap<Ident, Rc<NamedMatch>> {
fn n_rec(p_s: &ParseSess, m: &TokenTree, res: &[Rc<NamedMatch>],
- ret_val: &mut HashMap<Ident, Rc<NamedMatch>>, idx: &mut uint) {
+ ret_val: &mut HashMap<Ident, Rc<NamedMatch>>, idx: &mut usize) {
match m {
&TtSequence(_, ref seq) => {
for next_m in seq.tts.iter() {
}
}
let mut ret_val = HashMap::new();
- let mut idx = 0u;
+ let mut idx = 0us;
for m in ms.iter() { n_rec(p_s, m, res, &mut ret_val, &mut idx) }
ret_val
}
if seq.op == ast::ZeroOrMore {
let mut new_ei = ei.clone();
new_ei.match_cur += seq.num_captures;
- new_ei.idx += 1u;
+ new_ei.idx += 1us;
//we specifically matched zero repeats.
for idx in range(ei.match_cur, ei.match_cur + seq.num_captures) {
(&mut new_ei.matches[idx]).push(Rc::new(MatchedSeq(vec![], sp)));
cur_eis.push(box MatcherPos {
stack: vec![],
sep: seq.separator.clone(),
- idx: 0u,
+ idx: 0us,
matches: matches,
match_lo: ei_t.match_cur,
match_cur: ei_t.match_cur,
/* error messages here could be improved with links to orig. rules */
if token_name_eq(&tok, &token::Eof) {
- if eof_eis.len() == 1u {
+ if eof_eis.len() == 1us {
let mut v = Vec::new();
for dv in (&mut eof_eis[0]).matches.iter_mut() {
v.push(dv.pop().unwrap());
}
return Success(nameize(sess, ms, &v[]));
- } else if eof_eis.len() > 1u {
+ } else if eof_eis.len() > 1us {
return Error(sp, "ambiguity: multiple successful parses".to_string());
} else {
return Failure(sp, "unexpected end of macro invocation".to_string());
}
} else {
- if (bb_eis.len() > 0u && next_eis.len() > 0u)
- || bb_eis.len() > 1u {
+ if (bb_eis.len() > 0us && next_eis.len() > 0us)
+ || bb_eis.len() > 1us {
let nts = bb_eis.iter().map(|ei| {
match ei.top_elts.get_tt(ei.idx) {
TtToken(_, MatchNt(bind, name, _, _)) => {
"local ambiguity: multiple parsing options: \
built-in NTs {} or {} other options.",
nts, next_eis.len()).to_string());
- } else if bb_eis.len() == 0u && next_eis.len() == 0u {
+ } else if bb_eis.len() == 0us && next_eis.len() == 0us {
return Failure(sp, format!("no rules expected the token `{}`",
pprust::token_to_string(&tok)).to_string());
- } else if next_eis.len() > 0u {
+ } else if next_eis.len() > 0us {
/* Now process the next token */
- while next_eis.len() > 0u {
+ while next_eis.len() > 0us {
cur_eis.push(next_eis.pop().unwrap());
}
rdr.next_token();
let match_cur = ei.match_cur;
(&mut ei.matches[match_cur]).push(Rc::new(MatchedNonterminal(
parse_nt(&mut rust_parser, name_string.get()))));
- ei.idx += 1u;
+ ei.idx += 1us;
ei.match_cur += 1;
}
_ => panic!()
}
}
- assert!(cur_eis.len() > 0u);
+ assert!(cur_eis.len() > 0us);
}
}
pub fn parse_nt(p: &mut Parser, name: &str) -> Nonterminal {
match name {
"tt" => {
- p.quote_depth += 1u; //but in theory, non-quoted tts might be useful
+ p.quote_depth += 1us; //but in theory, non-quoted tts might be useful
let res = token::NtTT(P(p.parse_token_tree()));
- p.quote_depth -= 1u;
+ p.quote_depth -= 1us;
return res;
}
_ => {}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use ast::{TokenTree, TtDelimited, TtSequence, TtToken};
-use ast;
+use ast::{self, TokenTree, TtDelimited, TtSequence, TtToken};
use codemap::{Span, DUMMY_SP};
use ext::base::{ExtCtxt, MacResult, SyntaxExtension};
use ext::base::{NormalTT, TTMacroExpander};
use parse::lexer::{new_tt_reader, new_tt_reader_with_doc_flag};
use parse::parser::Parser;
use parse::attr::ParserAttr;
-use parse::token::{special_idents, gensym_ident, NtTT, Token};
+use parse::token::{self, special_idents, gensym_ident, NtTT, Token};
use parse::token::Token::*;
-use parse::token;
use print;
use ptr::P;
let tok = if let TtToken(_, ref tok) = *token { tok } else { unreachable!() };
// If T' is in the set FOLLOW(NT), continue. Else, reject.
- match &next_token {
- &Eof => return Some((sp, tok.clone())),
- _ if is_in_follow(cx, &next_token, frag_spec.as_str()) => continue,
- next => {
+ match (&next_token, is_in_follow(cx, &next_token, frag_spec.as_str())) {
+ (&Eof, _) => return Some((sp, tok.clone())),
+ (_, Ok(true)) => continue,
+ (next, Ok(false)) => {
cx.span_err(sp, format!("`${0}:{1}` is followed by `{2}`, which \
is not allowed for `{1}` fragments",
name.as_str(), frag_spec.as_str(),
token_to_string(next)).as_slice());
continue
},
+ (_, Err(msg)) => {
+ cx.span_err(sp, msg.as_slice());
+ continue
+ }
}
},
TtSequence(sp, ref seq) => {
last
}
-fn is_in_follow(cx: &ExtCtxt, tok: &Token, frag: &str) -> bool {
+fn is_in_follow(_: &ExtCtxt, tok: &Token, frag: &str) -> Result<bool, String> {
if let &CloseDelim(_) = tok {
- return true;
- }
-
- match frag {
- "item" => {
- // since items *must* be followed by either a `;` or a `}`, we can
- // accept anything after them
- true
- },
- "block" => {
- // anything can follow block, the braces provide a easy boundary to
- // maintain
- true
- },
- "stmt" | "expr" => {
- match *tok {
- FatArrow | Comma | Semi => true,
- _ => false
- }
- },
- "pat" => {
- match *tok {
- FatArrow | Comma | Eq => true,
- _ => false
- }
- },
- "path" | "ty" => {
- match *tok {
- Comma | FatArrow | Colon | Eq | Gt => true,
- Ident(i, _) if i.as_str() == "as" => true,
- _ => false
- }
- },
- "ident" => {
- // being a single token, idents are harmless
- true
- },
- "meta" | "tt" => {
- // being either a single token or a delimited sequence, tt is
- // harmless
- true
- },
- _ => cx.bug(format!("unrecognized builtin nonterminal {}",
- frag).as_slice()),
+ Ok(true)
+ } else {
+ match frag {
+ "item" => {
+ // since items *must* be followed by either a `;` or a `}`, we can
+ // accept anything after them
+ Ok(true)
+ },
+ "block" => {
+ // anything can follow block, the braces provide a easy boundary to
+ // maintain
+ Ok(true)
+ },
+ "stmt" | "expr" => {
+ match *tok {
+ FatArrow | Comma | Semi => Ok(true),
+ _ => Ok(false)
+ }
+ },
+ "pat" => {
+ match *tok {
+ FatArrow | Comma | Eq => Ok(true),
+ _ => Ok(false)
+ }
+ },
+ "path" | "ty" => {
+ match *tok {
+ Comma | FatArrow | Colon | Eq | Gt => Ok(true),
+ Ident(i, _) if i.as_str() == "as" => Ok(true),
+ _ => Ok(false)
+ }
+ },
+ "ident" => {
+ // being a single token, idents are harmless
+ Ok(true)
+ },
+ "meta" | "tt" => {
+ // being either a single token or a delimited sequence, tt is
+ // harmless
+ Ok(true)
+ },
+ _ => Err(format!("unrecognized builtin nonterminal `{}`", frag))
+ }
}
}
#[derive(Clone)]
struct TtFrame {
forest: TokenTree,
- idx: uint,
+ idx: usize,
dotdotdoted: bool,
sep: Option<Token>,
}
// Some => return imported_from as the next token
crate_name_next: Option<Span>,
- repeat_idx: Vec<uint>,
- repeat_len: Vec<uint>,
+ repeat_idx: Vec<usize>,
+ repeat_len: Vec<usize>,
/* cached: */
pub cur_tok: Token,
pub cur_span: Span,
#[derive(Clone)]
enum LockstepIterSize {
LisUnconstrained,
- LisConstraint(uint, Ident),
+ LisConstraint(usize, Ident),
LisContradiction(String),
}
r.repeat_len.pop();
}
} else { /* repeat */
- *r.repeat_idx.last_mut().unwrap() += 1u;
+ *r.repeat_idx.last_mut().unwrap() += 1us;
r.stack.last_mut().unwrap().idx = 0;
match r.stack.last().unwrap().sep.clone() {
Some(tk) => {
noop_fold_ident(i, self)
}
- fn fold_uint(&mut self, i: uint) -> uint {
- noop_fold_uint(i, self)
+ fn fold_usize(&mut self, i: usize) -> usize {
+ noop_fold_usize(i, self)
}
fn fold_path(&mut self, p: Path) -> Path {
i
}
-pub fn noop_fold_uint<T: Folder>(i: uint, _: &mut T) -> uint {
+pub fn noop_fold_usize<T: Folder>(i: usize, _: &mut T) -> usize {
i
}
}
ExprTupField(el, ident) => {
ExprTupField(folder.fold_expr(el),
- respan(ident.span, folder.fold_uint(ident.node)))
+ respan(ident.span, folder.fold_usize(ident.node)))
}
ExprIndex(el, er) => {
ExprIndex(folder.fold_expr(el), folder.fold_expr(er))
use std::io;
use std::str;
use std::string::String;
-use std::uint;
+use std::usize;
#[derive(Clone, Copy, PartialEq)]
pub enum CommentStyle {
pub fn strip_doc_comment_decoration(comment: &str) -> String {
/// remove whitespace-only lines from the start/end of lines
fn vertical_trim(lines: Vec<String> ) -> Vec<String> {
- let mut i = 0u;
+ let mut i = 0us;
let mut j = lines.len();
// first line of all-stars should be omitted
if lines.len() > 0 &&
/// remove a "[ \t]*\*" block from each line, if possible
fn horizontal_trim(lines: Vec<String> ) -> Vec<String> {
- let mut i = uint::MAX;
+ let mut i = usize::MAX;
let mut can_trim = true;
let mut first = true;
for line in lines.iter() {
if can_trim {
lines.iter().map(|line| {
- (&line[(i + 1)..line.len()]).to_string()
+ (&line[i + 1..line.len()]).to_string()
}).collect()
} else {
lines
}
if comment.starts_with("/*") {
- let lines = comment[3u..(comment.len() - 2u)]
+ let lines = comment[3..comment.len() - 2]
.lines_any()
.map(|s| s.to_string())
.collect::<Vec<String> >();
fn consume_whitespace_counting_blank_lines(rdr: &mut StringReader,
comments: &mut Vec<Comment>) {
while is_whitespace(rdr.curr) && !rdr.is_eof() {
- if rdr.col == CharPos(0u) && rdr.curr_is('\n') {
+ if rdr.col == CharPos(0us) && rdr.curr_is('\n') {
push_blank_line_comment(rdr, &mut *comments);
}
rdr.bump();
/// Returns None if the first col chars of s contain a non-whitespace char.
/// Otherwise returns Some(k) where k is first char offset after that leading
/// whitespace. Note k may be outside bounds of s.
-fn all_whitespace(s: &str, col: CharPos) -> Option<uint> {
+fn all_whitespace(s: &str, col: CharPos) -> Option<usize> {
let len = s.len();
- let mut col = col.to_uint();
- let mut cursor: uint = 0;
+ let mut col = col.to_usize();
+ let mut cursor: usize = 0;
while col > 0 && cursor < len {
let r: str::CharRange = s.char_range_at(cursor);
if !r.ch.is_whitespace() {
assert!(!curr_line.contains_char('\n'));
lines.push(curr_line);
} else {
- let mut level: int = 1;
+ let mut level: isize = 1;
while level > 0 {
debug!("=== block comment level {}", level);
if rdr.is_eof() {
let mut style = if code_to_the_left { Trailing } else { Isolated };
rdr.consume_non_eol_whitespace();
- if !rdr.is_eof() && !rdr.curr_is('\n') && lines.len() == 1u {
+ if !rdr.is_eof() && !rdr.curr_is('\n') && lines.len() == 1us {
style = Mixed;
}
debug!("<<< block comment");
}
#[test] fn test_block_doc_comment_3() {
- let comment = "/**\n let a: *int;\n *a = 5;\n*/";
+ let comment = "/**\n let a: *i32;\n *a = 5;\n*/";
let stripped = strip_doc_comment_decoration(comment);
- assert_eq!(stripped, " let a: *int;\n *a = 5;");
+ assert_eq!(stripped, " let a: *i32;\n *a = 5;");
}
#[test] fn test_block_doc_comment_4() {
/// offending string to the error message
fn fatal_span_verbose(&self, from_pos: BytePos, to_pos: BytePos, mut m: String) -> ! {
m.push_str(": ");
- let from = self.byte_offset(from_pos).to_uint();
- let to = self.byte_offset(to_pos).to_uint();
+ let from = self.byte_offset(from_pos).to_usize();
+ let to = self.byte_offset(to_pos).to_usize();
m.push_str(&self.filemap.src[from..to]);
self.fatal_span_(from_pos, to_pos, &m[]);
}
F: FnOnce(&str) -> T,
{
f(self.filemap.src.slice(
- self.byte_offset(start).to_uint(),
- self.byte_offset(end).to_uint()))
+ self.byte_offset(start).to_usize(),
+ self.byte_offset(end).to_usize()))
}
/// Converts CRLF to LF in the given string, raising an error on bare CR.
fn translate_crlf<'b>(&self, start: BytePos,
s: &'b str, errmsg: &'b str) -> CowString<'b> {
- let mut i = 0u;
+ let mut i = 0us;
while i < s.len() {
let str::CharRange { ch, next } = s.char_range_at(i);
if ch == '\r' {
return s.into_cow();
fn translate_crlf_(rdr: &StringReader, start: BytePos,
- s: &str, errmsg: &str, mut i: uint) -> String {
+ s: &str, errmsg: &str, mut i: usize) -> String {
let mut buf = String::with_capacity(s.len());
let mut j = 0;
while i < s.len() {
/// discovered, add it to the FileMap's list of line start offsets.
pub fn bump(&mut self) {
self.last_pos = self.pos;
- let current_byte_offset = self.byte_offset(self.pos).to_uint();
+ let current_byte_offset = self.byte_offset(self.pos).to_usize();
if current_byte_offset < self.filemap.src.len() {
assert!(self.curr.is_some());
let last_char = self.curr.unwrap();
.src
.char_range_at(current_byte_offset);
let byte_offset_diff = next.next - current_byte_offset;
- self.pos = self.pos + Pos::from_uint(byte_offset_diff);
+ self.pos = self.pos + Pos::from_usize(byte_offset_diff);
self.curr = Some(next.ch);
- self.col = self.col + CharPos(1u);
+ self.col = self.col + CharPos(1us);
if last_char == '\n' {
self.filemap.next_line(self.last_pos);
- self.col = CharPos(0u);
+ self.col = CharPos(0us);
}
if byte_offset_diff > 1 {
}
pub fn nextch(&self) -> Option<char> {
- let offset = self.byte_offset(self.pos).to_uint();
+ let offset = self.byte_offset(self.pos).to_usize();
if offset < self.filemap.src.len() {
Some(self.filemap.src.char_at(offset))
} else {
}
pub fn nextnextch(&self) -> Option<char> {
- let offset = self.byte_offset(self.pos).to_uint();
+ let offset = self.byte_offset(self.pos).to_usize();
let s = self.filemap.src.as_slice();
if offset >= s.len() { return None }
let str::CharRange { next, .. } = s.char_range_at(offset);
cmap.files.borrow_mut().push(self.filemap.clone());
let loc = cmap.lookup_char_pos_adj(self.last_pos);
debug!("Skipping a shebang");
- if loc.line == 1u && loc.col == CharPos(0u) {
+ if loc.line == 1us && loc.col == CharPos(0us) {
// FIXME: Add shebang "token", return it
let start = self.last_pos;
while !self.curr_is('\n') && !self.is_eof() { self.bump(); }
let is_doc_comment = self.curr_is('*') || self.curr_is('!');
let start_bpos = self.last_pos - BytePos(2);
- let mut level: int = 1;
+ let mut level: isize = 1;
let mut has_cr = false;
while level > 0 {
if self.is_eof() {
/// Scan through any digits (base `radix`) or underscores, and return how
/// many digits there were.
- fn scan_digits(&mut self, radix: uint) -> uint {
- let mut len = 0u;
+ fn scan_digits(&mut self, radix: usize) -> usize {
+ let mut len = 0us;
loop {
let c = self.curr;
if c == Some('_') { debug!("skipping a _"); self.bump(); continue; }
/// Scan over `n_digits` hex digits, stopping at `delim`, reporting an
/// error if too many or too few digits are encountered.
fn scan_hex_digits(&mut self,
- n_digits: uint,
+ n_digits: usize,
delim: char,
below_0x7f_only: bool)
-> bool {
if self.curr == Some('{') {
self.scan_unicode_escape(delim)
} else {
- let res = self.scan_hex_digits(4u, delim, false);
+ let res = self.scan_hex_digits(4us, delim, false);
let sp = codemap::mk_sp(escaped_pos, self.last_pos);
self.old_escape_warning(sp);
res
}
}
'U' if !ascii_only => {
- let res = self.scan_hex_digits(8u, delim, false);
+ let res = self.scan_hex_digits(8us, delim, false);
let sp = codemap::mk_sp(escaped_pos, self.last_pos);
self.old_escape_warning(sp);
res
fn scan_unicode_escape(&mut self, delim: char) -> bool {
self.bump(); // past the {
let start_bpos = self.last_pos;
- let mut count: uint = 0;
+ let mut count = 0us;
let mut accum_int = 0;
while !self.curr_is('}') && count <= 6 {
/// Check that a base is valid for a floating literal, emitting a nice
/// error if it isn't.
- fn check_float_base(&mut self, start_bpos: BytePos, last_bpos: BytePos, base: uint) {
+ fn check_float_base(&mut self, start_bpos: BytePos, last_bpos: BytePos, base: usize) {
match base {
- 16u => self.err_span_(start_bpos, last_bpos, "hexadecimal float literal is not \
- supported"),
- 8u => self.err_span_(start_bpos, last_bpos, "octal float literal is not supported"),
- 2u => self.err_span_(start_bpos, last_bpos, "binary float literal is not supported"),
- _ => ()
+ 16us => self.err_span_(start_bpos, last_bpos, "hexadecimal float literal is not \
+ supported"),
+ 8us => self.err_span_(start_bpos, last_bpos, "octal float literal is not supported"),
+ 2us => self.err_span_(start_bpos, last_bpos, "binary float literal is not supported"),
+ _ => ()
}
}
'r' => {
let start_bpos = self.last_pos;
self.bump();
- let mut hash_count = 0u;
+ let mut hash_count = 0us;
while self.curr_is('#') {
self.bump();
hash_count += 1;
fn scan_raw_byte_string(&mut self) -> token::Lit {
let start_bpos = self.last_pos;
self.bump();
- let mut hash_count = 0u;
+ let mut hash_count = 0us;
while self.curr_is('#') {
self.bump();
hash_count += 1;
test!("1.0", Float, "1.0");
test!("1.0e10", Float, "1.0e10");
- assert_eq!(setup(&mk_sh(), "2u".to_string()).next_token().tok,
+ assert_eq!(setup(&mk_sh(), "2us".to_string()).next_token().tok,
token::Literal(token::Integer(token::intern("2")),
- Some(token::intern("u"))));
+ Some(token::intern("us"))));
assert_eq!(setup(&mk_sh(), "r###\"raw\"###suffix".to_string()).next_token().tok,
token::Literal(token::StrRaw(token::intern("raw"), 3),
Some(token::intern("suffix"))));
name,
source
);
- p.quote_depth += 1u;
+ p.quote_depth += 1us;
// right now this is re-creating the token trees from ... token trees.
maybe_aborted(p.parse_all_token_trees(),p)
}
let bytes = match File::open(path).read_to_end() {
Ok(bytes) => bytes,
Err(e) => {
- err(&format!("couldn't read {:?}: {:?}",
- path.display(),
- e)[]);
+ err(&format!("couldn't read {:?}: {}",
+ path.display(), e)[]);
unreachable!()
}
};
name,
source
);
- p.quote_depth += 1u;
+ p.quote_depth += 1us;
// right now this is re-creating the token trees from ... token trees.
maybe_aborted(p.parse_all_token_trees(),p)
}
/// Rather than just accepting/rejecting a given literal, unescapes it as
/// well. Can take any slice prefixed by a character escape. Returns the
/// character and the number of characters consumed.
-pub fn char_lit(lit: &str) -> (char, int) {
+pub fn char_lit(lit: &str) -> (char, isize) {
use std::{num, char};
let mut chars = lit.chars();
let msg = format!("lexer should have rejected a bad character escape {}", lit);
let msg2 = &msg[];
- fn esc(len: uint, lit: &str) -> Option<(char, int)> {
+ fn esc(len: usize, lit: &str) -> Option<(char, isize)> {
num::from_str_radix(&lit[2..len], 16)
.and_then(char::from_u32)
- .map(|x| (x, len as int))
+ .map(|x| (x, len as isize))
}
- let unicode_escape = |&: | -> Option<(char, int)>
+ let unicode_escape = |&: | -> Option<(char, isize)>
if lit.as_bytes()[2] == b'{' {
let idx = lit.find('}').expect(msg2);
let subslice = &lit[3..idx];
num::from_str_radix(subslice, 16)
.and_then(char::from_u32)
- .map(|x| (x, subslice.chars().count() as int + 4))
+ .map(|x| (x, subslice.chars().count() as isize + 4))
} else {
esc(6, lit)
};
let error = |&: i| format!("lexer should have rejected {} at {}", lit, i);
/// Eat everything up to a non-whitespace
- fn eat<'a>(it: &mut iter::Peekable<(uint, char), str::CharIndices<'a>>) {
+ fn eat<'a>(it: &mut iter::Peekable<(usize, char), str::CharIndices<'a>>) {
loop {
match it.peek().map(|x| x.1) {
Some(' ') | Some('\n') | Some('\r') | Some('\t') => {
}
/// Parse a string representing a byte literal into its final form. Similar to `char_lit`
-pub fn byte_lit(lit: &str) -> (u8, uint) {
+pub fn byte_lit(lit: &str) -> (u8, usize) {
let err = |&: i| format!("lexer accepted invalid byte literal {} step {}", lit, i);
if lit.len() == 1 {
(lit.as_bytes()[0], 1)
} else {
- assert!(lit.as_bytes()[0] == b'\\', err(0i));
+ assert!(lit.as_bytes()[0] == b'\\', err(0is));
let b = match lit.as_bytes()[1] {
b'"' => b'"',
b'n' => b'\n',
let error = |&: i| format!("lexer should have rejected {} at {}", lit, i);
/// Eat everything up to a non-whitespace
- fn eat<'a, I: Iterator<Item=(uint, u8)>>(it: &mut iter::Peekable<(uint, u8), I>) {
+ fn eat<'a, I: Iterator<Item=(usize, u8)>>(it: &mut iter::Peekable<(usize, u8), I>) {
loop {
match it.peek().map(|x| x.1) {
Some(b' ') | Some(b'\n') | Some(b'\r') | Some(b'\t') => {
match suffix {
Some(suf) if looks_like_width_suffix(&['f'], suf) => {
match base {
- 16u => sd.span_err(sp, "hexadecimal float literal is not supported"),
- 8u => sd.span_err(sp, "octal float literal is not supported"),
- 2u => sd.span_err(sp, "binary float literal is not supported"),
+ 16us => sd.span_err(sp, "hexadecimal float literal is not supported"),
+ 8us => sd.span_err(sp, "octal float literal is not supported"),
+ 2us => sd.span_err(sp, "binary float literal is not supported"),
_ => ()
}
let ident = token::intern_and_get_ident(&*s);
#[test]
fn string_to_tts_1 () {
- let tts = string_to_tts("fn a (b : int) { b; }".to_string());
+ let tts = string_to_tts("fn a (b : i32) { b; }".to_string());
assert_eq!(json::encode(&tts),
"[\
{\
{\
\"variant\":\"Ident\",\
\"fields\":[\
- \"int\",\
+ \"i32\",\
\"Plain\"\
]\
}\
// check the contents of the tt manually:
#[test] fn parse_fundecl () {
- // this test depends on the intern order of "fn" and "int"
- assert!(string_to_item("fn a (b : int) { b; }".to_string()) ==
+ // this test depends on the intern order of "fn" and "i32"
+ assert_eq!(string_to_item("fn a (b : i32) { b; }".to_string()),
Some(
P(ast::Item{ident:str_to_ident("a"),
attrs:Vec::new(),
segments: vec!(
ast::PathSegment {
identifier:
- str_to_ident("int"),
+ str_to_ident("i32"),
parameters: ast::PathParameters::none(),
}
),
#[test] fn span_of_self_arg_pat_idents_are_correct() {
- let srcs = ["impl z { fn a (&self, &myarg: int) {} }",
- "impl z { fn a (&mut self, &myarg: int) {} }",
- "impl z { fn a (&'a self, &myarg: int) {} }",
- "impl z { fn a (self, &myarg: int) {} }",
- "impl z { fn a (self: Foo, &myarg: int) {} }",
+ let srcs = ["impl z { fn a (&self, &myarg: i32) {} }",
+ "impl z { fn a (&mut self, &myarg: i32) {} }",
+ "impl z { fn a (&'a self, &myarg: i32) {} }",
+ "impl z { fn a (self, &myarg: i32) {} }",
+ "impl z { fn a (self: Foo, &myarg: i32) {} }",
];
for &src in srcs.iter() {
let spans = get_spans_of_pat_idents(src);
let Span{ lo, hi, .. } = spans[0];
- assert!("self" == &src[lo.to_uint()..hi.to_uint()],
+ assert!("self" == &src[lo.to_usize()..hi.to_usize()],
"\"{}\" != \"self\". src=\"{}\"",
- &src[lo.to_uint()..hi.to_uint()], src)
+ &src[lo.to_usize()..hi.to_usize()], src)
}
}
"use a `move ||` expression instead",
),
ObsoleteSyntax::ClosureType => (
- "`|uint| -> bool` closure type syntax",
+ "`|usize| -> bool` closure type syntax",
"use unboxed closures instead, no type annotation needed"
),
ObsoleteSyntax::Sized => (
/// the previous token or None (only stashed sometimes).
pub last_token: Option<Box<token::Token>>,
pub buffer: [TokenAndSpan; 4],
- pub buffer_start: int,
- pub buffer_end: int,
- pub tokens_consumed: uint,
+ pub buffer_start: isize,
+ pub buffer_end: isize,
+ pub tokens_consumed: usize,
pub restrictions: Restrictions,
- pub quote_depth: uint, // not (yet) related to the quasiquoter
+ pub quote_depth: usize, // not (yet) related to the quasiquoter
pub reader: Box<Reader+'a>,
pub interner: Rc<token::IdentInterner>,
/// The set of seen errors about obsolete syntax. Used to suppress
// would encounter a `>` and stop. This lets the parser handle trailing
// commas in generic parameters, because it can stop either after
// parsing a type or after parsing a comma.
- for i in iter::count(0u, 1) {
+ for i in iter::count(0us, 1) {
if self.check(&token::Gt)
|| self.token == token::BinOp(token::Shr)
|| self.token == token::Ge
self.reader.real_token()
} else {
// Avoid token copies with `replace`.
- let buffer_start = self.buffer_start as uint;
- let next_index = (buffer_start + 1) & 3 as uint;
- self.buffer_start = next_index as int;
+ let buffer_start = self.buffer_start as usize;
+ let next_index = (buffer_start + 1) & 3 as usize;
+ self.buffer_start = next_index as isize;
let placeholder = TokenAndSpan {
tok: token::Underscore,
};
self.span = next.sp;
self.token = next.tok;
- self.tokens_consumed += 1u;
+ self.tokens_consumed += 1us;
self.expected_tokens.clear();
// check after each token
self.check_unknown_macro_variable();
self.token = next;
self.span = mk_sp(lo, hi);
}
- pub fn buffer_length(&mut self) -> int {
+ pub fn buffer_length(&mut self) -> isize {
if self.buffer_start <= self.buffer_end {
return self.buffer_end - self.buffer_start;
}
return (4 - self.buffer_start) + self.buffer_end;
}
- pub fn look_ahead<R, F>(&mut self, distance: uint, f: F) -> R where
+ pub fn look_ahead<R, F>(&mut self, distance: usize, f: F) -> R where
F: FnOnce(&token::Token) -> R,
{
- let dist = distance as int;
+ let dist = distance as isize;
while self.buffer_length() < dist {
- self.buffer[self.buffer_end as uint] = self.reader.real_token();
+ self.buffer[self.buffer_end as usize] = self.reader.real_token();
self.buffer_end = (self.buffer_end + 1) & 3;
}
- f(&self.buffer[((self.buffer_start + dist - 1) & 3) as uint].tok)
+ f(&self.buffer[((self.buffer_start + dist - 1) & 3) as usize].tok)
}
pub fn fatal(&mut self, m: &str) -> ! {
self.sess.span_diagnostic.span_fatal(self.span, m)
self.expect(&token::OpenDelim(token::Bracket));
let t = self.parse_ty_sum();
- // Parse the `; e` in `[ int; e ]`
+ // Parse the `; e` in `[ i32; e ]`
// where `e` is a const expression
let t = match self.maybe_parse_fixed_length_of_vec() {
None => TyVec(t),
ExprField(expr, ident)
}
- pub fn mk_tup_field(&mut self, expr: P<Expr>, idx: codemap::Spanned<uint>) -> ast::Expr_ {
+ pub fn mk_tup_field(&mut self, expr: P<Expr>, idx: codemap::Spanned<usize>) -> ast::Expr_ {
ExprTupField(expr, idx)
}
hi = self.span.hi;
self.bump();
- let index = n.as_str().parse::<uint>();
+ let index = n.as_str().parse::<usize>();
match index {
Some(n) => {
let id = spanned(dot, hi, n);
};
self.span_help(last_span,
&format!("try parenthesizing the first index; e.g., `(foo.{}){}`",
- float.trunc() as uint,
+ float.trunc() as usize,
&float.fract().to_string()[1..])[]);
}
self.abort_if_errors();
}
pub fn check_unknown_macro_variable(&mut self) {
- if self.quote_depth == 0u {
+ if self.quote_depth == 0us {
match self.token {
token::SubstNt(name, _) =>
self.fatal(&format!("unknown macro variable `{}`",
token_str)[])
},
/* we ought to allow different depths of unquotation */
- token::Dollar | token::SubstNt(..) if p.quote_depth > 0u => {
+ token::Dollar | token::SubstNt(..) if p.quote_depth > 0us => {
p.parse_unquoted()
}
_ => {
}
/// Parse an expression of binops of at least min_prec precedence
- pub fn parse_more_binops(&mut self, lhs: P<Expr>, min_prec: uint) -> P<Expr> {
+ pub fn parse_more_binops(&mut self, lhs: P<Expr>, min_prec: usize) -> P<Expr> {
if self.expr_is_complete(&*lhs) { return lhs; }
// Prevent dynamic borrow errors later on by limiting the
"Chained comparison operators require parentheses");
if op == BiLt && outer_op == BiGt {
self.span_help(op_span,
- "Use ::< instead of < if you meant to specify type arguments.");
+ "use ::< instead of < if you meant to specify type arguments");
}
}
_ => {}
Some(attrs))
}
- /// Parse a::B<String,int>
+ /// Parse a::B<String,i32>
fn parse_trait_ref(&mut self) -> TraitRef {
ast::TraitRef {
path: self.parse_path(LifetimeAndTypesWithoutColons),
}
}
- /// Parse for<'l> a::B<String,int>
+ /// Parse for<'l> a::B<String,i32>
fn parse_poly_trait_ref(&mut self) -> PolyTraitRef {
let lifetime_defs = self.parse_late_bound_lifetime_defs();
}
}
- if first && attrs_remaining_len > 0u {
+ if first && attrs_remaining_len > 0us {
// We parsed attributes for the first item but didn't find it
let last_span = self.last_span;
self.span_err(last_span,
return IoviItem(item);
}
if self.token.is_keyword(keywords::Unsafe) &&
- self.look_ahead(1u, |t| t.is_keyword(keywords::Trait))
+ self.look_ahead(1us, |t| t.is_keyword(keywords::Trait))
{
// UNSAFE TRAIT ITEM
self.expect_keyword(keywords::Unsafe);
return IoviItem(item);
}
if self.token.is_keyword(keywords::Unsafe) &&
- self.look_ahead(1u, |t| t.is_keyword(keywords::Impl))
+ self.look_ahead(1us, |t| t.is_keyword(keywords::Impl))
{
// IMPL ITEM
self.expect_keyword(keywords::Unsafe);
return IoviItem(item);
}
if self.token.is_keyword(keywords::Unsafe)
- && self.look_ahead(1u, |t| *t != token::OpenDelim(token::Brace)) {
+ && self.look_ahead(1us, |t| *t != token::OpenDelim(token::Brace)) {
// UNSAFE FUNCTION ITEM
self.bump();
let abi = if self.eat_keyword(keywords::Extern) {
}
}
}
- let mut rename_to = path[path.len() - 1u];
+ let mut rename_to = path[path.len() - 1us];
let path = ast::Path {
span: mk_sp(lo, self.last_span.hi),
global: false,
Integer(ast::Name),
Float(ast::Name),
Str_(ast::Name),
- StrRaw(ast::Name, uint), /* raw str delimited by n hash symbols */
+ StrRaw(ast::Name, usize), /* raw str delimited by n hash symbols */
Binary(ast::Name),
- BinaryRaw(ast::Name, uint), /* raw binary str delimited by n hash symbols */
+ BinaryRaw(ast::Name, usize), /* raw binary str delimited by n hash symbols */
}
impl Lit {
get_ident_interner().intern(s)
}
-/// gensym's a new uint, using the current interner.
+/// gensym's a new usize, using the current interner.
#[inline]
pub fn gensym(s: &str) -> ast::Name {
get_ident_interner().gensym(s)
// create a fresh mark.
pub fn fresh_mark() -> ast::Mrk {
- gensym("mark").uint() as u32
+ gensym("mark").usize() as u32
}
#[cfg(test)]
//!
//! In particular you'll see a certain amount of churn related to INTEGER vs.
//! CARDINAL in the Mesa implementation. Mesa apparently interconverts the two
-//! somewhat readily? In any case, I've used uint for indices-in-buffers and
+//! somewhat readily? In any case, I've used usize for indices-in-buffers and
//! ints for character-sizes-and-indentation-offsets. This respects the need
//! for ints to "go negative" while carrying a pending-calculation balance, and
//! helps differentiate all the numbers flying around internally (slightly).
#[derive(Clone, Copy)]
pub struct BreakToken {
- offset: int,
- blank_space: int
+ offset: isize,
+ blank_space: isize
}
#[derive(Clone, Copy)]
pub struct BeginToken {
- offset: int,
+ offset: isize,
breaks: Breaks
}
#[derive(Clone)]
pub enum Token {
- String(String, int),
+ String(String, isize),
Break(BreakToken),
Begin(BeginToken),
End,
}
pub fn buf_str(toks: &[Token],
- szs: &[int],
- left: uint,
- right: uint,
- lim: uint)
+ szs: &[isize],
+ left: usize,
+ right: usize,
+ lim: usize)
-> String {
let n = toks.len();
assert_eq!(n, szs.len());
let mut i = left;
let mut l = lim;
let mut s = string::String::from_str("[");
- while i != right && l != 0u {
- l -= 1u;
+ while i != right && l != 0us {
+ l -= 1us;
if i != left {
s.push_str(", ");
}
s.push_str(&format!("{}={}",
szs[i],
tok_str(&toks[i]))[]);
- i += 1u;
+ i += 1us;
i %= n;
}
s.push(']');
#[derive(Copy)]
pub struct PrintStackElem {
- offset: int,
+ offset: isize,
pbreak: PrintStackBreak
}
-static SIZE_INFINITY: int = 0xffff;
+static SIZE_INFINITY: isize = 0xffff;
-pub fn mk_printer(out: Box<io::Writer+'static>, linewidth: uint) -> Printer {
+pub fn mk_printer(out: Box<io::Writer+'static>, linewidth: usize) -> Printer {
// Yes 3, it makes the ring buffers big enough to never
// fall behind.
- let n: uint = 3 * linewidth;
+ let n: usize = 3 * linewidth;
debug!("mk_printer {}", linewidth);
let token: Vec<Token> = repeat(Token::Eof).take(n).collect();
- let size: Vec<int> = repeat(0i).take(n).collect();
- let scan_stack: Vec<uint> = repeat(0u).take(n).collect();
+ let size: Vec<isize> = repeat(0is).take(n).collect();
+ let scan_stack: Vec<usize> = repeat(0us).take(n).collect();
Printer {
out: out,
buf_len: n,
- margin: linewidth as int,
- space: linewidth as int,
+ margin: linewidth as isize,
+ space: linewidth as isize,
left: 0,
right: 0,
token: token,
/// called 'print'.
pub struct Printer {
pub out: Box<io::Writer+'static>,
- buf_len: uint,
+ buf_len: usize,
/// Width of lines we're constrained to
- margin: int,
+ margin: isize,
/// Number of spaces left on line
- space: int,
+ space: isize,
/// Index of left side of input stream
- left: uint,
+ left: usize,
/// Index of right side of input stream
- right: uint,
+ right: usize,
/// Ring-buffer stream goes through
token: Vec<Token> ,
/// Ring-buffer of calculated sizes
- size: Vec<int> ,
+ size: Vec<isize> ,
/// Running size of stream "...left"
- left_total: int,
+ left_total: isize,
/// Running size of stream "...right"
- right_total: int,
+ right_total: isize,
/// Pseudo-stack, really a ring too. Holds the
/// primary-ring-buffers index of the Begin that started the
/// current block, possibly with the most recent Break after that
/// Begin (if there is any) on top of it. Stuff is flushed off the
/// bottom as it becomes irrelevant due to the primary ring-buffer
/// advancing.
- scan_stack: Vec<uint> ,
+ scan_stack: Vec<usize> ,
/// Top==bottom disambiguator
scan_stack_empty: bool,
/// Index of top of scan_stack
- top: uint,
+ top: usize,
/// Index of bottom of scan_stack
- bottom: uint,
+ bottom: usize,
/// Stack of blocks-in-progress being flushed by print
print_stack: Vec<PrintStackElem> ,
/// Buffered indentation to avoid writing trailing whitespace
- pending_indentation: int,
+ pending_indentation: isize,
}
impl Printer {
if self.scan_stack_empty {
self.left_total = 1;
self.right_total = 1;
- self.left = 0u;
- self.right = 0u;
+ self.left = 0us;
+ self.right = 0us;
} else { self.advance_right(); }
debug!("pp Begin({})/buffer ~[{},{}]",
b.offset, self.left, self.right);
if self.scan_stack_empty {
self.left_total = 1;
self.right_total = 1;
- self.left = 0u;
- self.right = 0u;
+ self.left = 0us;
+ self.right = 0us;
} else { self.advance_right(); }
debug!("pp Break({})/buffer ~[{},{}]",
b.offset, self.left, self.right);
}
Ok(())
}
- pub fn scan_push(&mut self, x: uint) {
+ pub fn scan_push(&mut self, x: usize) {
debug!("scan_push {}", x);
if self.scan_stack_empty {
self.scan_stack_empty = false;
} else {
- self.top += 1u;
+ self.top += 1us;
self.top %= self.buf_len;
assert!((self.top != self.bottom));
}
self.scan_stack[self.top] = x;
}
- pub fn scan_pop(&mut self) -> uint {
+ pub fn scan_pop(&mut self) -> usize {
assert!((!self.scan_stack_empty));
let x = self.scan_stack[self.top];
if self.top == self.bottom {
self.scan_stack_empty = true;
} else {
- self.top += self.buf_len - 1u; self.top %= self.buf_len;
+ self.top += self.buf_len - 1us; self.top %= self.buf_len;
}
return x;
}
- pub fn scan_top(&mut self) -> uint {
+ pub fn scan_top(&mut self) -> usize {
assert!((!self.scan_stack_empty));
return self.scan_stack[self.top];
}
- pub fn scan_pop_bottom(&mut self) -> uint {
+ pub fn scan_pop_bottom(&mut self) -> usize {
assert!((!self.scan_stack_empty));
let x = self.scan_stack[self.bottom];
if self.top == self.bottom {
self.scan_stack_empty = true;
} else {
- self.bottom += 1u; self.bottom %= self.buf_len;
+ self.bottom += 1us; self.bottom %= self.buf_len;
}
return x;
}
pub fn advance_right(&mut self) {
- self.right += 1u;
+ self.right += 1us;
self.right %= self.buf_len;
assert!((self.right != self.left));
}
break;
}
- self.left += 1u;
+ self.left += 1us;
self.left %= self.buf_len;
left_size = self.size[self.left];
Ok(())
}
- pub fn check_stack(&mut self, k: int) {
+ pub fn check_stack(&mut self, k: isize) {
if !self.scan_stack_empty {
let x = self.scan_top();
match self.token[x] {
}
}
}
- pub fn print_newline(&mut self, amount: int) -> io::IoResult<()> {
+ pub fn print_newline(&mut self, amount: isize) -> io::IoResult<()> {
debug!("NEWLINE {}", amount);
let ret = write!(self.out, "\n");
self.pending_indentation = 0;
self.indent(amount);
return ret;
}
- pub fn indent(&mut self, amount: int) {
+ pub fn indent(&mut self, amount: isize) {
debug!("INDENT {}", amount);
self.pending_indentation += amount;
}
pub fn get_top(&mut self) -> PrintStackElem {
let print_stack = &mut self.print_stack;
let n = print_stack.len();
- if n != 0u {
+ if n != 0us {
(*print_stack)[n - 1]
} else {
PrintStackElem {
}
write!(self.out, "{}", s)
}
- pub fn print(&mut self, token: Token, l: int) -> io::IoResult<()> {
+ pub fn print(&mut self, token: Token, l: isize) -> io::IoResult<()> {
debug!("print {} {} (remaining line space={})", tok_str(&token), l,
self.space);
debug!("{}", buf_str(&self.token[],
Token::End => {
debug!("print End -> pop End");
let print_stack = &mut self.print_stack;
- assert!((print_stack.len() != 0u));
+ assert!((print_stack.len() != 0us));
print_stack.pop().unwrap();
Ok(())
}
// Convenience functions to talk to the printer.
//
// "raw box"
-pub fn rbox(p: &mut Printer, indent: uint, b: Breaks) -> io::IoResult<()> {
+pub fn rbox(p: &mut Printer, indent: usize, b: Breaks) -> io::IoResult<()> {
p.pretty_print(Token::Begin(BeginToken {
- offset: indent as int,
+ offset: indent as isize,
breaks: b
}))
}
-pub fn ibox(p: &mut Printer, indent: uint) -> io::IoResult<()> {
+pub fn ibox(p: &mut Printer, indent: usize) -> io::IoResult<()> {
rbox(p, indent, Breaks::Inconsistent)
}
-pub fn cbox(p: &mut Printer, indent: uint) -> io::IoResult<()> {
+pub fn cbox(p: &mut Printer, indent: usize) -> io::IoResult<()> {
rbox(p, indent, Breaks::Consistent)
}
-pub fn break_offset(p: &mut Printer, n: uint, off: int) -> io::IoResult<()> {
+pub fn break_offset(p: &mut Printer, n: usize, off: isize) -> io::IoResult<()> {
p.pretty_print(Token::Break(BreakToken {
offset: off,
- blank_space: n as int
+ blank_space: n as isize
}))
}
}
pub fn word(p: &mut Printer, wrd: &str) -> io::IoResult<()> {
- p.pretty_print(Token::String(/* bad */ wrd.to_string(), wrd.len() as int))
+ p.pretty_print(Token::String(/* bad */ wrd.to_string(), wrd.len() as isize))
}
pub fn huge_word(p: &mut Printer, wrd: &str) -> io::IoResult<()> {
p.pretty_print(Token::String(/* bad */ wrd.to_string(), 0))
}
-pub fn spaces(p: &mut Printer, n: uint) -> io::IoResult<()> {
+pub fn spaces(p: &mut Printer, n: usize) -> io::IoResult<()> {
break_offset(p, n, 0)
}
pub fn zerobreak(p: &mut Printer) -> io::IoResult<()> {
- spaces(p, 0u)
+ spaces(p, 0us)
}
pub fn space(p: &mut Printer) -> io::IoResult<()> {
- spaces(p, 1u)
+ spaces(p, 1us)
}
pub fn hardbreak(p: &mut Printer) -> io::IoResult<()> {
- spaces(p, SIZE_INFINITY as uint)
+ spaces(p, SIZE_INFINITY as usize)
}
-pub fn hardbreak_tok_offset(off: int) -> Token {
+pub fn hardbreak_tok_offset(off: isize) -> Token {
Token::Break(BreakToken {offset: off, blank_space: SIZE_INFINITY})
}
#[derive(Copy)]
pub struct CurrentCommentAndLiteral {
- cur_cmnt: uint,
- cur_lit: uint,
+ cur_cmnt: usize,
+ cur_lit: usize,
}
pub struct State<'a> {
}
#[allow(non_upper_case_globals)]
-pub const indent_unit: uint = 4u;
+pub const indent_unit: usize = 4us;
#[allow(non_upper_case_globals)]
-pub const default_columns: uint = 78u;
+pub const default_columns: usize = 78us;
/// Requires you to pass an input filename and reader so that
/// it can scan the input text for comments and literals to
// containing cbox, will be closed by print-block at }
try!(s.cbox(indent_unit));
// head-ibox, will be closed by print-block after {
- try!(s.ibox(0u));
+ try!(s.ibox(0us));
s.print_block(blk)
})
}
}
impl<'a> State<'a> {
- pub fn ibox(&mut self, u: uint) -> IoResult<()> {
+ pub fn ibox(&mut self, u: usize) -> IoResult<()> {
self.boxes.push(pp::Breaks::Inconsistent);
pp::ibox(&mut self.s, u)
}
pp::end(&mut self.s)
}
- pub fn cbox(&mut self, u: uint) -> IoResult<()> {
+ pub fn cbox(&mut self, u: usize) -> IoResult<()> {
self.boxes.push(pp::Breaks::Consistent);
pp::cbox(&mut self.s, u)
}
// "raw box"
- pub fn rbox(&mut self, u: uint, b: pp::Breaks) -> IoResult<()> {
+ pub fn rbox(&mut self, u: usize, b: pp::Breaks) -> IoResult<()> {
self.boxes.push(b);
pp::rbox(&mut self.s, u, b)
}
}
pub fn bclose_(&mut self, span: codemap::Span,
- indented: uint) -> IoResult<()> {
+ indented: usize) -> IoResult<()> {
self.bclose_maybe_open(span, indented, true)
}
pub fn bclose_maybe_open (&mut self, span: codemap::Span,
- indented: uint, close_box: bool) -> IoResult<()> {
+ indented: usize, close_box: bool) -> IoResult<()> {
try!(self.maybe_print_comment(span.hi));
- try!(self.break_offset_if_not_bol(1u, -(indented as int)));
+ try!(self.break_offset_if_not_bol(1us, -(indented as isize)));
try!(word(&mut self.s, "}"));
if close_box {
try!(self.end()); // close the outer-box
if !self.is_bol() { try!(space(&mut self.s)); }
Ok(())
}
- pub fn break_offset_if_not_bol(&mut self, n: uint,
- off: int) -> IoResult<()> {
+ pub fn break_offset_if_not_bol(&mut self, n: usize,
+ off: isize) -> IoResult<()> {
if !self.is_bol() {
break_offset(&mut self.s, n, off)
} else {
pub fn commasep<T, F>(&mut self, b: Breaks, elts: &[T], mut op: F) -> IoResult<()> where
F: FnMut(&mut State, &T) -> IoResult<()>,
{
- try!(self.rbox(0u, b));
+ try!(self.rbox(0us, b));
let mut first = true;
for elt in elts.iter() {
if first { first = false; } else { try!(self.word_space(",")); }
F: FnMut(&mut State, &T) -> IoResult<()>,
G: FnMut(&T) -> codemap::Span,
{
- try!(self.rbox(0u, b));
+ try!(self.rbox(0us, b));
let len = elts.len();
- let mut i = 0u;
+ let mut i = 0us;
for elt in elts.iter() {
try!(self.maybe_print_comment(get_span(elt).hi));
try!(op(self, elt));
- i += 1u;
+ i += 1us;
if i < len {
try!(word(&mut self.s, ","));
try!(self.maybe_print_trailing_comment(get_span(elt),
pub fn print_type(&mut self, ty: &ast::Ty) -> IoResult<()> {
try!(self.maybe_print_comment(ty.span.lo));
- try!(self.ibox(0u));
+ try!(self.ibox(0us));
match ty.node {
ast::TyVec(ref ty) => {
try!(word(&mut self.s, "["));
}
ast::ItemTy(ref ty, ref params) => {
try!(self.ibox(indent_unit));
- try!(self.ibox(0u));
+ try!(self.ibox(0us));
try!(self.word_nbsp(&visibility_qualified(item.vis, "type")[]));
try!(self.print_ident(item.ident));
try!(self.print_generics(params));
pub fn print_outer_attributes(&mut self,
attrs: &[ast::Attribute]) -> IoResult<()> {
- let mut count = 0u;
+ let mut count = 0us;
for attr in attrs.iter() {
match attr.node.style {
ast::AttrOuter => {
pub fn print_inner_attributes(&mut self,
attrs: &[ast::Attribute]) -> IoResult<()> {
- let mut count = 0u;
+ let mut count = 0us;
for attr in attrs.iter() {
match attr.node.style {
ast::AttrInner => {
}
pub fn print_block_unclosed_indent(&mut self, blk: &ast::Block,
- indented: uint) -> IoResult<()> {
+ indented: usize) -> IoResult<()> {
self.print_block_maybe_unclosed(blk, indented, &[], false)
}
pub fn print_block_maybe_unclosed(&mut self,
blk: &ast::Block,
- indented: uint,
+ indented: usize,
attrs: &[ast::Attribute],
close_box: bool) -> IoResult<()> {
match blk.rules {
match _else.node {
// "another else-if"
ast::ExprIf(ref i, ref then, ref e) => {
- try!(self.cbox(indent_unit - 1u));
- try!(self.ibox(0u));
+ try!(self.cbox(indent_unit - 1us));
+ try!(self.ibox(0us));
try!(word(&mut self.s, " else if "));
try!(self.print_expr(&**i));
try!(space(&mut self.s));
}
// "another else-if-let"
ast::ExprIfLet(ref pat, ref expr, ref then, ref e) => {
- try!(self.cbox(indent_unit - 1u));
- try!(self.ibox(0u));
+ try!(self.cbox(indent_unit - 1us));
+ try!(self.ibox(0us));
try!(word(&mut self.s, " else if let "));
try!(self.print_pat(&**pat));
try!(space(&mut self.s));
}
// "final else"
ast::ExprBlock(ref b) => {
- try!(self.cbox(indent_unit - 1u));
- try!(self.ibox(0u));
+ try!(self.cbox(indent_unit - 1us));
+ try!(self.ibox(0us));
try!(word(&mut self.s, " else "));
self.print_block(&**b)
}
try!(self.print_expr(&*args[0]));
try!(word(&mut self.s, "."));
try!(self.print_ident(ident.node));
- if tys.len() > 0u {
+ if tys.len() > 0us {
try!(word(&mut self.s, "::<"));
try!(self.commasep(Inconsistent, tys,
|s, ty| s.print_type(&**ty)));
// containing cbox, will be closed by print-block at }
try!(self.cbox(indent_unit));
// head-box, will be closed by print-block after {
- try!(self.ibox(0u));
+ try!(self.ibox(0us));
try!(self.print_block(&**blk));
}
ast::ExprAssign(ref lhs, ref rhs) => {
ast::ExprTupField(ref expr, id) => {
try!(self.print_expr(&**expr));
try!(word(&mut self.s, "."));
- try!(self.print_uint(id.node));
+ try!(self.print_usize(id.node));
}
ast::ExprIndex(ref expr, ref index) => {
try!(self.print_expr(&**expr));
self.ann.post(self, NodeIdent(&ident))
}
- pub fn print_uint(&mut self, i: uint) -> IoResult<()> {
+ pub fn print_usize(&mut self, i: usize) -> IoResult<()> {
word(&mut self.s, &i.to_string()[])
}
},
|f| f.node.pat.span));
if etc {
- if fields.len() != 0u { try!(self.word_space(",")); }
+ if fields.len() != 0us { try!(self.word_space(",")); }
try!(word(&mut self.s, ".."));
}
try!(space(&mut self.s));
try!(space(&mut self.s));
}
try!(self.cbox(indent_unit));
- try!(self.ibox(0u));
+ try!(self.ibox(0us));
try!(self.print_outer_attributes(&arm.attrs[]));
let mut first = true;
for p in arm.pats.iter() {
-> IoResult<()> {
// It is unfortunate to duplicate the commasep logic, but we want the
// self type and the args all in the same box.
- try!(self.rbox(0u, Inconsistent));
+ try!(self.rbox(0us, Inconsistent));
let mut first = true;
for &explicit_self in opt_explicit_self.iter() {
let m = match explicit_self {
try!(word(&mut self.s, "<"));
let mut ints = Vec::new();
- for i in range(0u, total) {
+ for i in range(0us, total) {
ints.push(i);
}
if span.hi < (*cmnt).pos && (*cmnt).pos < next &&
span_line.line == comment_line.line {
try!(self.print_comment(cmnt));
- self.cur_cmnt_and_lit.cur_cmnt += 1u;
+ self.cur_cmnt_and_lit.cur_cmnt += 1us;
}
}
_ => ()
match self.next_comment() {
Some(ref cmnt) => {
try!(self.print_comment(cmnt));
- self.cur_cmnt_and_lit.cur_cmnt += 1u;
+ self.cur_cmnt_and_lit.cur_cmnt += 1us;
}
_ => break
}
while self.cur_cmnt_and_lit.cur_lit < lits.len() {
let ltrl = (*lits)[self.cur_cmnt_and_lit.cur_lit].clone();
if ltrl.pos > pos { return None; }
- self.cur_cmnt_and_lit.cur_lit += 1u;
+ self.cur_cmnt_and_lit.cur_lit += 1us;
if ltrl.pos == pos { return Some(ltrl); }
}
None
Some(ref cmnt) => {
if (*cmnt).pos < pos {
try!(self.print_comment(cmnt));
- self.cur_cmnt_and_lit.cur_cmnt += 1u;
+ self.cur_cmnt_and_lit.cur_cmnt += 1us;
} else { break; }
}
_ => break
cmnt: &comments::Comment) -> IoResult<()> {
match cmnt.style {
comments::Mixed => {
- assert_eq!(cmnt.lines.len(), 1u);
+ assert_eq!(cmnt.lines.len(), 1us);
try!(zerobreak(&mut self.s));
try!(word(&mut self.s, &cmnt.lines[0][]));
zerobreak(&mut self.s)
}
comments::Trailing => {
try!(word(&mut self.s, " "));
- if cmnt.lines.len() == 1u {
+ if cmnt.lines.len() == 1us {
try!(word(&mut self.s, &cmnt.lines[0][]));
hardbreak(&mut self.s)
} else {
- try!(self.ibox(0u));
+ try!(self.ibox(0us));
for line in cmnt.lines.iter() {
if !line.is_empty() {
try!(word(&mut self.s, &line[]));
}
}
-fn repeat(s: &str, n: uint) -> String { iter::repeat(s).take(n).collect() }
+fn repeat(s: &str, n: usize) -> String { iter::repeat(s).take(n).collect() }
#[cfg(test)]
mod test {
let tparm_cnt = generics.ty_params.len();
// NB: inadequate check, but we're running
// well before resolve, can't get too deep.
- input_cnt == 1u
- && no_output && tparm_cnt == 0u
+ input_cnt == 1us
+ && no_output && tparm_cnt == 0us
}
_ => false
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-//! An "interner" is a data structure that associates values with uint tags and
+//! An "interner" is a data structure that associates values with usize tags and
//! allows bidirectional lookup; i.e. given a value, one can easily find the
//! type, and vice versa.
pub fn get(&self, idx: Name) -> T {
let vect = self.vect.borrow();
- (*vect)[idx.uint()].clone()
+ (*vect)[idx.usize()].clone()
}
- pub fn len(&self) -> uint {
+ pub fn len(&self) -> usize {
let vect = self.vect.borrow();
(*vect).len()
}
let new_idx = Name(self.len() as u32);
// leave out of map to avoid colliding
let mut vect = self.vect.borrow_mut();
- let existing = (*vect)[idx.uint()].clone();
+ let existing = (*vect)[idx.usize()].clone();
vect.push(existing);
new_idx
}
pub fn get(&self, idx: Name) -> RcStr {
- (*self.vect.borrow())[idx.uint()].clone()
+ (*self.vect.borrow())[idx.usize()].clone()
}
- pub fn len(&self) -> uint {
+ pub fn len(&self) -> usize {
self.vect.borrow().len()
}
}
}
-/// Given a string and an index, return the first uint >= idx
+/// Given a string and an index, return the first usize >= idx
/// that is a non-ws-char or is outside of the legal range of
/// the string.
-fn scan_for_non_ws_or_end(a : &str, idx: uint) -> uint {
+fn scan_for_non_ws_or_end(a : &str, idx: usize) -> usize {
let mut i = idx;
let len = a.len();
while (i < len) && (is_whitespace(a.char_at(i))) {
}
}
- pub fn get<'a>(&'a self, idx: uint) -> &'a T {
+ pub fn get<'a>(&'a self, idx: usize) -> &'a T {
match self.repr {
One(ref v) if idx == 0 => v,
Many(ref vs) => &vs[idx],
IntoIter { repr: repr }
}
- pub fn len(&self) -> uint {
+ pub fn len(&self) -> usize {
match self.repr {
Zero => 0,
One(..) => 1,
}
}
- fn size_hint(&self) -> (uint, Option<uint>) {
+ fn size_hint(&self) -> (usize, Option<usize>) {
match self.repr {
ZeroIterator => (0, Some(0)),
OneIterator(..) => (1, Some(1)),
#[test]
fn test_len() {
- let v: SmallVector<int> = SmallVector::zero();
+ let v: SmallVector<isize> = SmallVector::zero();
assert_eq!(0, v.len());
- assert_eq!(1, SmallVector::one(1i).len());
- assert_eq!(5, SmallVector::many(vec!(1i, 2, 3, 4, 5)).len());
+ assert_eq!(1, SmallVector::one(1is).len());
+ assert_eq!(5, SmallVector::many(vec!(1is, 2, 3, 4, 5)).len());
}
#[test]
fn test_push_get() {
let mut v = SmallVector::zero();
- v.push(1i);
+ v.push(1is);
assert_eq!(1, v.len());
assert_eq!(&1, v.get(0));
v.push(2);
#[test]
fn test_from_iter() {
- let v: SmallVector<int> = (vec!(1i, 2, 3)).into_iter().collect();
+ let v: SmallVector<isize> = (vec![1is, 2, 3]).into_iter().collect();
assert_eq!(3, v.len());
assert_eq!(&1, v.get(0));
assert_eq!(&2, v.get(1));
#[test]
fn test_move_iter() {
let v = SmallVector::zero();
- let v: Vec<int> = v.into_iter().collect();
+ let v: Vec<isize> = v.into_iter().collect();
assert_eq!(Vec::new(), v);
- let v = SmallVector::one(1i);
- assert_eq!(vec!(1i), v.into_iter().collect::<Vec<_>>());
+ let v = SmallVector::one(1is);
+ assert_eq!(vec!(1is), v.into_iter().collect::<Vec<_>>());
- let v = SmallVector::many(vec!(1i, 2i, 3i));
- assert_eq!(vec!(1i, 2i, 3i), v.into_iter().collect::<Vec<_>>());
+ let v = SmallVector::many(vec!(1is, 2is, 3is));
+ assert_eq!(vec!(1is, 2is, 3is), v.into_iter().collect::<Vec<_>>());
}
#[test]
#[should_fail]
fn test_expect_one_zero() {
- let _: int = SmallVector::zero().expect_one("");
+ let _: isize = SmallVector::zero().expect_one("");
}
#[test]
#[should_fail]
fn test_expect_one_many() {
- SmallVector::many(vec!(1i, 2)).expect_one("");
+ SmallVector::many(vec!(1is, 2)).expect_one("");
}
#[test]
fn test_expect_one_one() {
- assert_eq!(1i, SmallVector::one(1i).expect_one(""));
- assert_eq!(1i, SmallVector::many(vec!(1i)).expect_one(""));
+ assert_eq!(1is, SmallVector::one(1is).expect_one(""));
+ assert_eq!(1is, SmallVector::many(vec!(1is)).expect_one(""));
}
}
// Find the offset of the NUL we want to go to
- let nulpos = string_table[(offset as uint) .. (string_table_bytes as uint)]
+ let nulpos = string_table[offset as uint .. string_table_bytes as uint]
.iter().position(|&b| b == 0);
match nulpos {
Some(len) => {
string_map.insert(name.to_string(),
- string_table[(offset as uint) ..
+ string_table[offset as uint ..
(offset as uint + len)].to_vec())
},
None => {
/// Manager of the benchmarking runs.
///
-/// This is feed into functions marked with `#[bench]` to allow for
+/// This is fed into functions marked with `#[bench]` to allow for
/// set-up & tear-down before running a piece of code repeatedly via a
/// call to `iter`.
#[derive(Copy)]
pub run_ignored: bool,
pub run_tests: bool,
pub run_benchmarks: bool,
- pub ratchet_metrics: Option<Path>,
- pub ratchet_noise_percent: Option<f64>,
- pub save_metrics: Option<Path>,
- pub test_shard: Option<(uint,uint)>,
pub logfile: Option<Path>,
pub nocapture: bool,
pub color: ColorConfig,
- pub show_boxplot: bool,
- pub boxplot_width: uint,
- pub show_all_stats: bool,
}
impl TestOpts {
run_ignored: false,
run_tests: false,
run_benchmarks: false,
- ratchet_metrics: None,
- ratchet_noise_percent: None,
- save_metrics: None,
- test_shard: None,
logfile: None,
nocapture: false,
color: AutoColor,
- show_boxplot: false,
- boxplot_width: 50,
- show_all_stats: false,
}
}
}
getopts::optflag("", "test", "Run tests and not benchmarks"),
getopts::optflag("", "bench", "Run benchmarks instead of tests"),
getopts::optflag("h", "help", "Display this message (longer with --help)"),
- getopts::optopt("", "save-metrics", "Location to save bench metrics",
- "PATH"),
- getopts::optopt("", "ratchet-metrics",
- "Location to load and save metrics from. The metrics \
- loaded are cause benchmarks to fail if they run too \
- slowly", "PATH"),
- getopts::optopt("", "ratchet-noise-percent",
- "Tests within N% of the recorded metrics will be \
- considered as passing", "PERCENTAGE"),
getopts::optopt("", "logfile", "Write logs to the specified file instead \
of stdout", "PATH"),
- getopts::optopt("", "test-shard", "run shard A, of B shards, worth of the testsuite",
- "A.B"),
getopts::optflag("", "nocapture", "don't capture stdout/stderr of each \
task, allow printing directly"),
getopts::optopt("", "color", "Configure coloring of output:
auto = colorize if stdout is a tty and tests are run on serially (default);
always = always colorize output;
- never = never colorize output;", "auto|always|never"),
- getopts::optflag("", "boxplot", "Display a boxplot of the benchmark statistics"),
- getopts::optopt("", "boxplot-width", "Set the boxplot width (default 50)", "WIDTH"),
- getopts::optflag("", "stats", "Display the benchmark min, max, and quartiles"))
+ never = never colorize output;", "auto|always|never"))
}
fn usage(binary: &str) {
let run_tests = ! run_benchmarks ||
matches.opt_present("test");
- let ratchet_metrics = matches.opt_str("ratchet-metrics");
- let ratchet_metrics = ratchet_metrics.map(|s| Path::new(s));
-
- let ratchet_noise_percent = matches.opt_str("ratchet-noise-percent");
- let ratchet_noise_percent =
- ratchet_noise_percent.map(|s| s.as_slice().parse::<f64>().unwrap());
-
- let save_metrics = matches.opt_str("save-metrics");
- let save_metrics = save_metrics.map(|s| Path::new(s));
-
- let test_shard = matches.opt_str("test-shard");
- let test_shard = opt_shard(test_shard);
-
let mut nocapture = matches.opt_present("nocapture");
if !nocapture {
nocapture = os::getenv("RUST_TEST_NOCAPTURE").is_some();
v))),
};
- let show_boxplot = matches.opt_present("boxplot");
- let boxplot_width = match matches.opt_str("boxplot-width") {
- Some(width) => {
- match FromStr::from_str(width.as_slice()) {
- Some(width) => width,
- None => {
- return Some(Err(format!("argument for --boxplot-width must be a uint")));
- }
- }
- }
- None => 50,
- };
-
- let show_all_stats = matches.opt_present("stats");
-
let test_opts = TestOpts {
filter: filter,
run_ignored: run_ignored,
run_tests: run_tests,
run_benchmarks: run_benchmarks,
- ratchet_metrics: ratchet_metrics,
- ratchet_noise_percent: ratchet_noise_percent,
- save_metrics: save_metrics,
- test_shard: test_shard,
logfile: logfile,
nocapture: nocapture,
color: color,
- show_boxplot: show_boxplot,
- boxplot_width: boxplot_width,
- show_all_stats: show_all_stats,
};
Some(Ok(test_opts))
}
-pub fn opt_shard(maybestr: Option<String>) -> Option<(uint,uint)> {
- match maybestr {
- None => None,
- Some(s) => {
- let mut it = s.split('.');
- match (it.next().and_then(|s| s.parse::<uint>()),
- it.next().and_then(|s| s.parse::<uint>()),
- it.next()) {
- (Some(a), Some(b), None) => {
- if a <= 0 || a > b {
- panic!("tried to run shard {a}.{b}, but {a} is out of bounds \
- (should be between 1 and {b}", a=a, b=b)
- }
- Some((a, b))
- }
- _ => None,
- }
- }
- }
-}
-
-
#[derive(Clone, PartialEq)]
pub struct BenchSamples {
ns_iter_summ: stats::Summary<f64>,
out: out,
log_out: log_out,
use_color: use_color(opts),
- show_boxplot: opts.show_boxplot,
- boxplot_width: opts.boxplot_width,
- show_all_stats: opts.show_all_stats,
+ show_boxplot: false,
+ boxplot_width: 50,
+ show_all_stats: false,
total: 0u,
passed: 0u,
failed: 0u,
None => {}
}
try!(run_tests(opts, tests, |x| callback(&x, &mut st)));
- match opts.save_metrics {
- None => (),
- Some(ref pth) => {
- try!(st.metrics.save(pth));
- try!(st.write_plain(format!("\nmetrics saved to: {:?}",
- pth.display()).as_slice()));
- }
- }
- return st.write_run_finish(&opts.ratchet_metrics, opts.ratchet_noise_percent);
+ return st.write_run_finish(&None, None);
}
#[test]
// Sort the tests alphabetically
filtered.sort_by(|t1, t2| t1.desc.name.as_slice().cmp(t2.desc.name.as_slice()));
- // Shard the remaining tests, if sharding requested.
- match opts.test_shard {
- None => filtered,
- Some((a,b)) => {
- filtered.into_iter().enumerate()
- // note: using a - 1 so that the valid shards, for example, are
- // 1.2 and 2.2 instead of 0.2 and 1.2
- .filter(|&(i,_)| i % b == (a - 1))
- .map(|(_,t)| t)
- .collect()
- }
- }
+ filtered
}
pub fn run_test(opts: &TestOpts,
-Subproject commit ec1fdb3b9d3b1fb9e1dae97a65dd3a13db9bfb23
+Subproject commit b820135911e17c7a46b901db56baa48e5155bf46
//include valgrind.h after stdint.h so that uintptr_t is defined for msys2 w64
#include "valgrind/valgrind.h"
-#ifdef __ANDROID__
-time_t
-timegm(struct tm *tm)
-{
- time_t ret;
- char *tz;
-
- tz = getenv("TZ");
- if (tz)
- tz = strdup(tz);
- setenv("TZ", "", 1);
- tzset();
- ret = mktime(tm);
- if (tz) {
- setenv("TZ", tz, 1);
- free(tz);
- } else
- unsetenv("TZ");
- tzset();
- return ret;
-}
-#endif
-
#ifdef __APPLE__
#if (TARGET_OS_IPHONE)
extern char **environ;
}
#endif
-typedef struct {
- int32_t tm_sec;
- int32_t tm_min;
- int32_t tm_hour;
- int32_t tm_mday;
- int32_t tm_mon;
- int32_t tm_year;
- int32_t tm_wday;
- int32_t tm_yday;
- int32_t tm_isdst;
- int32_t tm_gmtoff;
- int32_t tm_nsec;
-} rust_tm;
-
-void rust_tm_to_tm(rust_tm* in_tm, struct tm* out_tm) {
- memset(out_tm, 0, sizeof(struct tm));
- out_tm->tm_sec = in_tm->tm_sec;
- out_tm->tm_min = in_tm->tm_min;
- out_tm->tm_hour = in_tm->tm_hour;
- out_tm->tm_mday = in_tm->tm_mday;
- out_tm->tm_mon = in_tm->tm_mon;
- out_tm->tm_year = in_tm->tm_year;
- out_tm->tm_wday = in_tm->tm_wday;
- out_tm->tm_yday = in_tm->tm_yday;
- out_tm->tm_isdst = in_tm->tm_isdst;
-}
-
-void tm_to_rust_tm(struct tm* in_tm,
- rust_tm* out_tm,
- int32_t gmtoff,
- int32_t nsec) {
- out_tm->tm_sec = in_tm->tm_sec;
- out_tm->tm_min = in_tm->tm_min;
- out_tm->tm_hour = in_tm->tm_hour;
- out_tm->tm_mday = in_tm->tm_mday;
- out_tm->tm_mon = in_tm->tm_mon;
- out_tm->tm_year = in_tm->tm_year;
- out_tm->tm_wday = in_tm->tm_wday;
- out_tm->tm_yday = in_tm->tm_yday;
- out_tm->tm_isdst = in_tm->tm_isdst;
- out_tm->tm_gmtoff = gmtoff;
- out_tm->tm_nsec = nsec;
-}
-
-#if defined(__WIN32__)
-#define TZSET() _tzset()
-#if defined(_MSC_VER) && (_MSC_VER >= 1400)
-#define GMTIME(clock, result) gmtime_s((result), (clock))
-#define LOCALTIME(clock, result) localtime_s((result), (clock))
-#define TIMEGM(result) _mkgmtime64(result)
-#else
-struct tm* GMTIME(const time_t *clock, struct tm *result) {
- struct tm* t = gmtime(clock);
- if (t == NULL || result == NULL) { return NULL; }
- *result = *t;
- return result;
-}
-struct tm* LOCALTIME(const time_t *clock, struct tm *result) {
- struct tm* t = localtime(clock);
- if (t == NULL || result == NULL) { return NULL; }
- *result = *t;
- return result;
-}
-#define TIMEGM(result) mktime((result)) - _timezone
-#endif
-#else
-#define TZSET() tzset()
-#define GMTIME(clock, result) gmtime_r((clock), (result))
-#define LOCALTIME(clock, result) localtime_r((clock), (result))
-#define TIMEGM(result) timegm(result)
-#endif
-
-void
-rust_tzset() {
- TZSET();
-}
-
-void
-rust_gmtime(int64_t sec, int32_t nsec, rust_tm *timeptr) {
- struct tm tm;
- time_t s = sec;
- GMTIME(&s, &tm);
-
- tm_to_rust_tm(&tm, timeptr, 0, nsec);
-}
-
-void
-rust_localtime(int64_t sec, int32_t nsec, rust_tm *timeptr) {
- struct tm tm;
- time_t s = sec;
- LOCALTIME(&s, &tm);
-
-#if defined(__WIN32__)
- int32_t gmtoff = -timezone;
-#else
- int32_t gmtoff = tm.tm_gmtoff;
-#endif
-
- tm_to_rust_tm(&tm, timeptr, gmtoff, nsec);
-}
-
-int64_t
-rust_timegm(rust_tm* timeptr) {
- struct tm t;
- rust_tm_to_tm(timeptr, &t);
- return TIMEGM(&t);
-}
-
-int64_t
-rust_mktime(rust_tm* timeptr) {
- struct tm t;
- rust_tm_to_tm(timeptr, &t);
- return mktime(&t);
-}
-
#ifndef _WIN32
DIR*
# If this file is modified, then llvm will be forcibly cleaned and then rebuilt.
# The actual contents of this file do not matter, but to trigger a change on the
# build bots then the contents should be changed so git updates the mtime.
-2015-01-13
+2015-01-18
+S 2015-01-20 9006c3c
+ freebsd-x86_64 240b30b33263d175e30f925ed1e1e1a4e553a513
+ linux-i386 544c2063b8d5035342c705b881b8868244c1e9a1
+ linux-x86_64 eb41db80978210a013a8dcf8f4fe804969197337
+ macos-i386 3ed08c5ae66367e85b8f2b207615d45bfd9cf89d
+ macos-x86_64 d102760316b90b17d54b0bef02ca6dc35f82e6bd
+ winnt-i386 6940fef6caa2f64d158b8f5eb00afd5c8e0c71a5
+ winnt-x86_64 36b6f239fe1264bceb4b8202e692b7d49947eebe
+
S 2015-01-15 9ade482
freebsd-x86_64 eb8f52c6e8dc24a293456d5e4dc5d1072442e758
linux-i386 0197ad7179d74eba06a8b46432548caf226aa03d
use syntax::parse::token;
use syntax::ast::{TokenTree, TtToken};
use syntax::ext::base::{ExtCtxt, MacResult, DummyResult, MacExpr};
-use syntax::ext::build::AstBuilder; // trait for expr_uint
+use syntax::ext::build::AstBuilder; // trait for expr_usize
use rustc::plugin::Registry;
// WARNING WARNING WARNING WARNING WARNING
}
}
- MacExpr::new(cx.expr_uint(sp, total))
+ MacExpr::new(cx.expr_usize(sp, total))
}
#[plugin_registrar]
let d = idx / self.fact[i] as i32;
self.cnt[i] = d;
idx %= self.fact[i] as i32;
- for (place, val) in pp.iter_mut().zip(self.perm.p[..(i+1)].iter()) {
+ for (place, val) in pp.iter_mut().zip(self.perm.p[..i+1].iter()) {
*place = (*val) as u8
}
}
n -= nb;
line[nb] = '\n' as u8;
- try!(wr.write(&line[..(nb+1)]));
+ try!(wr.write(&line[..nb+1]));
}
Ok(())
}
let len = bb.len();
while ii < len - (nn - 1u) {
- it(&bb[ii..(ii+nn)]);
+ it(&bb[ii..ii+nn]);
ii += 1u;
}
- return bb[(len - (nn - 1u))..len].to_vec();
+ return bb[len - (nn - 1u)..len].to_vec();
}
fn make_sequence_processor(sz: uint,
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn f(x: &mut i32) {}
+
+fn main() {
+ let x = 0;
+ f(&x);
+ //~^ ERROR mismatched types
+ //~| expected `&mut i32`
+ //~| found `&_`
+ //~| values differ in mutability
+}
struct Foo<'a> {
a: &'a Bar+'a,
//~^ ERROR E0178
- //~^^ NOTE perhaps you meant `&'a (Bar + 'a)`?
+ //~^^ HELP perhaps you meant `&'a (Bar + 'a)`?
b: &'a mut Bar+'a,
//~^ ERROR E0178
- //~^^ NOTE perhaps you meant `&'a mut (Bar + 'a)`?
+ //~^^ HELP perhaps you meant `&'a mut (Bar + 'a)`?
c: Box<Bar+'a>, // OK, no paren needed in this context
d: fn() -> Bar+'a,
//~^ ERROR E0178
- //~^^ NOTE perhaps you forgot parentheses
+ //~^^ HELP perhaps you forgot parentheses
//~^^^ WARN deprecated syntax
}
impl<H: StreamHasher> StreamHash<H> for u8 {
fn input_stream(&self, stream: &mut H::S) {
- Stream::input(&*stream, &[*self]);
+ Stream::input(stream, &[*self]);
}
}
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+macro_rules! test { ($wrong:t_ty ..) => () }
+ //~^ ERROR: unrecognized builtin nonterminal `t_ty`
+
+fn main() {}
let y = &mut x;
Foo::bar(&x); //~ERROR cannot borrow `x`
- let x = Foo;
- Foo::baz(&x); //~ERROR cannot borrow immutable borrowed content as mutable
+ let mut x = Foo;
+ let y = &mut x;
+ Foo::baz(&mut x); //~ERROR cannot borrow `x`
}
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// issue #21405
+
+fn foo<F>(f: F) where F: FnMut(usize) {}
+
+fn main() {
+ foo(|s| s.is_empty());
+ //~^ ERROR does not implement any method
+ //~^^ HELP #1: `core::slice::SliceExt`
+ //~^^^ HELP #2: `core::str::StrExt`
+ //~^^^^ HELP #3: `collections::slice::SliceExt`
+ //~^^^^^ HELP #4: `collections::str::StrExt`
+}
// Unsized type.
let arr: &[_] = &[1us, 2, 3];
- let range = (*arr)..;
+ let range = *arr..;
//~^ ERROR the trait `core::marker::Sized` is not implemented
}
pub fn main() {
let r = {
- (&42is)..&42
+ &42is..&42
//~^ ERROR borrowed value does not live long enough
//~^^ ERROR borrowed value does not live long enough
};
f<X>();
//~^ ERROR: Chained comparison operators require parentheses
- //~^^ HELP: Use ::< instead of < if you meant to specify type arguments.
+ //~^^ HELP: use ::< instead of < if you meant to specify type arguments
}
struct Foo<'a>(&'a isize);
impl<'a> Foo<'a> {
- //~^ HELP shadowed lifetime `'a` declared here
+ //~^ NOTE shadowed lifetime `'a` declared here
fn shadow_in_method<'a>(&'a self) -> &'a isize {
//~^ WARNING lifetime name `'a` shadows another lifetime name that is already in scope
- //~| HELP deprecated
+ //~| NOTE deprecated
self.0
}
fn shadow_in_type<'b>(&'b self) -> &'b isize {
- //~^ HELP shadowed lifetime `'b` declared here
+ //~^ NOTE shadowed lifetime `'b` declared here
let x: for<'b> fn(&'b isize) = panic!();
//~^ WARNING lifetime name `'b` shadows another lifetime name that is already in scope
- //~| HELP deprecated
+ //~| NOTE deprecated
self.0
}
fn main() {
let x: &[isize] = &[1, 2, 3, 4, 5];
// Immutable slices are not mutable.
- let y: &mut[_] = &x[2..4]; //~ ERROR cannot borrow immutable borrowed content as mutable
+ let y: &mut[_] = &x[2..4];
+ //~^ ERROR mismatched types
+ //~| expected `&mut [_]`
+ //~| found `&_`
+ //~| values differ in mutability
}
f<type>();
//~^ ERROR expected identifier, found keyword `type`
//~^^ ERROR: Chained comparison operators require parentheses
- //~^^^ HELP: Use ::< instead of < if you meant to specify type arguments.
+ //~^^^ HELP: use ::< instead of < if you meant to specify type arguments
}
opts.output_types = vec![OutputTypeExe];
opts.maybe_sysroot = Some(sysroot);
- let descriptions = Registry::new(&rustc::DIAGNOSTICS);
+ let descriptions = Registry::new(&rustc::diagnostics::DIAGNOSTICS);
let sess = build_session(opts, None, descriptions);
sess
}
--- /dev/null
+-include ../tools.mk
+
+all: foo.rs
+ $(HOST_RPATH_ENV) $(RUSTDOC) -w html -o $(TMPDIR)/doc foo.rs
+ $(HTMLDOCCK) $(TMPDIR)/doc foo.rs
+
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(optin_builtin_traits)]
+
+// @matches foo/struct.Alpha.html '//pre' "pub struct Alpha"
+pub struct Alpha;
+// @matches foo/struct.Bravo.html '//pre' "pub struct Bravo<B>"
+pub struct Bravo<B>;
+
+// @matches foo/struct.Alpha.html '//*[@class="impl"]//code' "impl !.*Send.* for .*Alpha"
+impl !Send for Alpha {}
+
+// @matches foo/struct.Bravo.html '//*[@class="impl"]//code' "impl<B> !.*Send.* for .*Bravo.*<B>"
+impl<B> !Send for Bravo<B> {}
let _: Box<[int]> = Box::new([1, 2, 3]);
let _: Box<Fn(int) -> _> = Box::new(|x| (x as u8));
+
+ let _: Vec<Box<Fn(int) -> _>> = vec![
+ Box::new(|x| (x as u8)),
+ box |x| (x as i16 as u8),
+ ];
}
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn main() {
+ let mut shrinker: Box<Iterator<Item=i32>> = Box::new(vec![1].into_iter());
+ println!("{:?}", shrinker.next());
+ for v in shrinker { assert!(false); }
+
+ let mut shrinker: &mut Iterator<Item=i32> = &mut vec![1].into_iter();
+ println!("{:?}", shrinker.next());
+ for v in shrinker { assert!(false); }
+}
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn main() {
+ let v = vec![1, 2, 3];
+ let boxed: Box<Iterator<Item=i32>> = Box::new(v.into_iter());
+ assert_eq!(boxed.max(), Some(3));
+
+ let v = vec![1, 2, 3];
+ let boxed: &mut Iterator<Item=i32> = &mut v.into_iter();
+ assert_eq!(boxed.max(), Some(3));
+}
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn test<T : Clone>(arg: T) -> T {
+ arg.clone()
+}
+
+#[derive(PartialEq)]
+struct Test(int);
+
+fn main() {
+ // Check that ranges implement clone
+ assert!(test(1..5) == (1..5));
+ assert!(test(..5) == (..5));
+ assert!(test(1..) == (1..));
+ assert!(test(FullRange) == (FullRange));
+
+ // Check that ranges can still be used with non-clone limits
+ assert!((Test(1)..Test(5)) == (Test(1)..Test(5)));
+}
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#[derive(Hash)]
+struct Foo {
+ a: Vec<bool>,
+ b: (bool, bool),
+ c: [bool; 2],
+}
+
+fn main() {}
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn one() -> i32 { 1 }
+
+// Make sure the vec![...] macro doesn't introduce hidden rvalue
+// scopes (such as blocks) around the element expressions.
+pub fn main() {
+ assert_eq!(vec![&one(), &one(), &2], vec![&1, &1, &(one()+one())]);
+ assert_eq!(vec![&one(); 2], vec![&1, &one()]);
+}