[submodule "src/llvm-project"]
path = src/llvm-project
url = https://github.com/rust-lang/llvm-project.git
- branch = rustc/8.0-2019-01-16
+ branch = rustc/8.0-2019-03-18
[submodule "src/doc/embedded-book"]
path = src/doc/embedded-book
url = https://github.com/rust-embedded/book.git
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
]
+[[package]]
+name = "ena"
+version = "0.13.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "env_logger"
version = "0.5.13"
version = "0.0.0"
dependencies = [
"cfg-if 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
- "ena 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "ena 0.13.0 (registry+https://github.com/rust-lang/crates.io-index)",
"graphviz 0.0.0",
"jobserver 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"checksum either 1.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3be565ca5c557d7f59e7cfcf1844f9e3033650c929c6566f511e8005f205c1d0"
"checksum elasticlunr-rs 2.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "a99a310cd1f9770e7bf8e48810c7bcbb0e078c8fb23a8c7bcf0da4c2bf61a455"
"checksum ena 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f56c93cc076508c549d9bb747f79aa9b4eb098be7b8cad8830c3137ef52d1e00"
+"checksum ena 0.13.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3dc01d68e08ca384955a3aeba9217102ca1aa85b6e168639bf27739f1d749d87"
"checksum env_logger 0.5.13 (registry+https://github.com/rust-lang/crates.io-index)" = "15b0a4d2e39f8420210be8b27eeda28029729e2fd4291019455016c348240c38"
"checksum env_logger 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "afb070faf94c85d17d50ca44f6ad076bce18ae92f0037d350947240a36e9d42e"
"checksum error-chain 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ff511d5dc435d703f4971bc399647c9bc38e20cb41452e3b9feb4765419ed3f3"
let path = builder.src.join("src/doc").join(markdown);
- let favicon = builder.src.join("src/doc/favicon.inc");
+ let header = builder.src.join("src/doc/redirect.inc");
let footer = builder.src.join("src/doc/footer.inc");
let version_info = out.join("version_info.html");
cmd.arg("--html-after-content").arg(&footer)
.arg("--html-before-content").arg(&version_info)
- .arg("--html-in-header").arg(&favicon)
+ .arg("--html-in-header").arg(&header)
.arg("--markdown-no-toc")
.arg("--markdown-playground-url")
.arg("https://play.rust-lang.org/")
src_dir="`dirname $ci_dir`"
root_dir="`dirname $src_dir`"
+objdir=$root_dir/obj
+dist=$objdir/build/dist
+
source "$ci_dir/shared.sh"
travis_fold start build_docker
else
echo "Looks like docker image is the same as before, not uploading"
fi
+ # Record the container image for reuse, e.g. by rustup.rs builds
+ info="$dist/image-$image.txt"
+ mkdir -p "$dist"
+ echo "$url" >"$info"
+ echo "$digest" >>"$info"
fi
elif [ -f "$docker_dir/disabled/$image/Dockerfile" ]; then
if [ -n "$TRAVIS_OS_NAME" ]; then
travis_fold end build_docker
travis_time_finish
-objdir=$root_dir/obj
-
mkdir -p $HOME/.cargo
mkdir -p $objdir/tmp
mkdir -p $objdir/cores
--- /dev/null
+<meta name="robots" content="noindex,follow">
+<link rel="shortcut icon" href="https://www.rust-lang.org/favicon.ico">
= help: the trait `std::iter::Iterator` is not implemented for `&str`
= note: required by `std::iter::IntoIterator::into_iter`
```
+
+If you need to filter on multiple attributes, you can use `all`, `any` or
+`not` in the following way:
+
+```rust,compile_fail
+#[rustc_on_unimplemented(
+ on(
+ all(_Self="&str", T="std::string::String"),
+ note="you can coerce a `{T}` into a `{Self}` by writing `&*variable`"
+ )
+)]
+pub trait From<T>: Sized { /* ... */ }
+```
keys: uninitialized_array![_; CAPACITY],
vals: uninitialized_array![_; CAPACITY],
parent: ptr::null(),
- parent_idx: MaybeUninit::uninitialized(),
+ parent_idx: MaybeUninit::uninit(),
len: 0
}
}
// ever take a pointer past the first key.
static EMPTY_ROOT_NODE: NodeHeader<(), ()> = NodeHeader {
parent: ptr::null(),
- parent_idx: MaybeUninit::uninitialized(),
+ parent_idx: MaybeUninit::uninit(),
len: 0,
keys_start: [],
};
-> NodeRef<marker::Mut<'_>, K, V, marker::Internal> {
debug_assert!(!self.is_shared_root());
let mut new_node = Box::new(unsafe { InternalNode::new() });
- new_node.edges[0].set(unsafe { BoxedNode::from_ptr(self.node.as_ptr()) });
+ new_node.edges[0].write(unsafe { BoxedNode::from_ptr(self.node.as_ptr()) });
self.node = BoxedNode::from_internal(new_node);
self.height += 1;
unsafe {
ptr::write(self.keys_mut().get_unchecked_mut(idx), key);
ptr::write(self.vals_mut().get_unchecked_mut(idx), val);
- self.as_internal_mut().edges.get_unchecked_mut(idx + 1).set(edge.node);
+ self.as_internal_mut().edges.get_unchecked_mut(idx + 1).write(edge.node);
(*self.as_leaf_mut()).len += 1;
let mut child = self.descend();
unsafe {
(*child.as_leaf_mut()).parent = ptr;
- (*child.as_leaf_mut()).parent_idx.set(idx);
+ (*child.as_leaf_mut()).parent_idx.write(idx);
}
}
--- /dev/null
+// Lower-case ASCII 'a' is the first byte that has its highest bit set
+// after wrap-adding 0x1F:
+//
+// b'a' + 0x1F == 0x80 == 0b1000_0000
+//      b'z' + 0x1F == 0x99 == 0b1001_1001
+//
+// Lower-case ASCII 'z' is the last byte that has its highest bit unset
+// after wrap-adding 0x05:
+//
+// b'a' + 0x05 == 0x66 == 0b0110_0110
+// b'z' + 0x05 == 0x7F == 0b0111_1111
+//
+// … except for 0xFB to 0xFF, but those are in the range of bytes
+// that have the highest bit unset again after adding 0x1F.
+//
+// So `(byte + 0x1f) & !(byte + 5)` has its highest bit set
+// iff `byte` is a lower-case ASCII letter.
+//
+// Lower-case ASCII letters all have the 0x20 bit set.
+// (Two positions right of 0x80, the highest bit.)
+// Unsetting that bit produces the same letter, in upper-case.
+//
+// Therefore:
+// Upper-cases `byte` iff it is an ASCII lower-case letter, without branching;
+// the derivation of the bit trick is in the comment block above.
+fn branchless_to_ascii_upper_case(byte: u8) -> u8 {
+    byte &
+    !(
+        (
+            // High bit (0x80) of this expression is set iff `byte` is `b'a'..=b'z'`;
+            // shifting it right by 2 turns it into the 0x20 case bit (or zero).
+            byte.wrapping_add(0x1f) &
+            !byte.wrapping_add(0x05) &
+            0x80
+        ) >> 2
+    )
+}
+
+
+// Generates `#[bench]` functions from a list of `fn name(arg: &mut [u8]) { ... }` bodies.
+// Every listed function is instantiated three times, in modules `short`, `medium` and
+// `long`, benchmarking it against the input constant of the same name.
+macro_rules! benches {
+    // Entry arm: the names listed after `@iter` are `u8` predicate methods; each one
+    // becomes a benchmark body that runs `bytes.iter().all(u8::$is_)`.
+    ($( fn $name: ident($arg: ident: &mut [u8]) $body: block )+ @iter $( $is_: ident, )+) => {
+        benches! {@
+            $( fn $name($arg: &mut [u8]) $body )+
+            $( fn $is_(bytes: &mut [u8]) { bytes.iter().all(u8::$is_) } )+
+        }
+    };
+
+    // Fan-out arm: instantiate every benchmark once per input size.
+    (@$( fn $name: ident($arg: ident: &mut [u8]) $body: block )+) => {
+        benches!(mod short SHORT $($name $arg $body)+);
+        benches!(mod medium MEDIUM $($name $arg $body)+);
+        benches!(mod long LONG $($name $arg $body)+);
+    };
+
+    // Leaf arm: emit one module with a `#[bench]` fn per body. Each iteration copies
+    // the input into a fresh `Vec` (so `case00_alloc_only` measures that baseline cost)
+    // and returns the `Vec` so the optimizer cannot discard the work.
+    (mod $mod_name: ident $input: ident $($name: ident $arg: ident $body: block)+) => {
+        mod $mod_name {
+            use super::*;
+
+            $(
+                #[bench]
+                fn $name(bencher: &mut Bencher) {
+                    bencher.bytes = $input.len() as u64;
+                    bencher.iter(|| {
+                        let mut vec = $input.as_bytes().to_vec();
+                        {
+                            let $arg = &mut vec[..];
+                            black_box($body);
+                        }
+                        vec
+                    })
+                }
+            )+
+        }
+    }
+}
+
+use test::black_box;
+use test::Bencher;
+
+// Benchmarks a spread of ASCII upper-casing strategies — lookup table, branchy,
+// branchless bit tricks, word-at-a-time — against the libcore implementation,
+// plus (via `@iter`) the `u8::is_ascii_*` predicate methods.
+benches! {
+    // Baseline: measures only the per-iteration `Vec` allocation done by the harness.
+    fn case00_alloc_only(_bytes: &mut [u8]) {}
+
+    fn case01_black_box_read_each_byte(bytes: &mut [u8]) {
+        for byte in bytes {
+            black_box(*byte);
+        }
+    }
+
+    fn case02_lookup_table(bytes: &mut [u8]) {
+        for byte in bytes {
+            *byte = ASCII_UPPERCASE_MAP[*byte as usize]
+        }
+    }
+
+    fn case03_branch_and_subtract(bytes: &mut [u8]) {
+        for byte in bytes {
+            *byte = if b'a' <= *byte && *byte <= b'z' {
+                *byte - b'a' + b'A'
+            } else {
+                *byte
+            }
+        }
+    }
+
+    fn case04_branch_and_mask(bytes: &mut [u8]) {
+        for byte in bytes {
+            *byte = if b'a' <= *byte && *byte <= b'z' {
+                *byte & !0x20
+            } else {
+                *byte
+            }
+        }
+    }
+
+    fn case05_branchless(bytes: &mut [u8]) {
+        for byte in bytes {
+            *byte = branchless_to_ascii_upper_case(*byte)
+        }
+    }
+
+    // The implementation actually shipped in libcore.
+    fn case06_libcore(bytes: &mut [u8]) {
+        bytes.make_ascii_uppercase()
+    }
+
+    // Word-at-a-time variant of `branchless_to_ascii_upper_case`, four bytes per u32;
+    // unaligned head/tail bytes fall back to the scalar version.
+    fn case07_fake_simd_u32(bytes: &mut [u8]) {
+        let (before, aligned, after) = unsafe {
+            bytes.align_to_mut::<u32>()
+        };
+        for byte in before {
+            *byte = branchless_to_ascii_upper_case(*byte)
+        }
+        for word in aligned {
+            // FIXME: this is incorrect for some byte values:
+            // addition within a byte can carry/overflow into the next byte.
+            // Test case: b"\xFFz    "
+            *word &= !(
+                (
+                    word.wrapping_add(0x1f1f1f1f) &
+                    !word.wrapping_add(0x05050505) &
+                    0x80808080
+                ) >> 2
+            )
+        }
+        for byte in after {
+            *byte = branchless_to_ascii_upper_case(*byte)
+        }
+    }
+
+    fn case08_fake_simd_u64(bytes: &mut [u8]) {
+        let (before, aligned, after) = unsafe {
+            bytes.align_to_mut::<u64>()
+        };
+        for byte in before {
+            *byte = branchless_to_ascii_upper_case(*byte)
+        }
+        for word in aligned {
+            // FIXME: like above, this is incorrect for some byte values.
+            *word &= !(
+                (
+                    word.wrapping_add(0x1f1f1f1f_1f1f1f1f) &
+                    !word.wrapping_add(0x05050505_05050505) &
+                    0x80808080_80808080
+                ) >> 2
+            )
+        }
+        for byte in after {
+            *byte = branchless_to_ascii_upper_case(*byte)
+        }
+    }
+
+    fn case09_mask_mult_bool_branchy_lookup_table(bytes: &mut [u8]) {
+        fn is_ascii_lowercase(b: u8) -> bool {
+            if b >= 0x80 { return false }
+            match ASCII_CHARACTER_CLASS[b as usize] {
+                L | Lx => true,
+                _ => false,
+            }
+        }
+        for byte in bytes {
+            *byte &= !(0x20 * (is_ascii_lowercase(*byte) as u8))
+        }
+    }
+
+    fn case10_mask_mult_bool_lookup_table(bytes: &mut [u8]) {
+        fn is_ascii_lowercase(b: u8) -> bool {
+            match ASCII_CHARACTER_CLASS[b as usize] {
+                L | Lx => true,
+                _ => false
+            }
+        }
+        for byte in bytes {
+            *byte &= !(0x20 * (is_ascii_lowercase(*byte) as u8))
+        }
+    }
+
+    fn case11_mask_mult_bool_match_range(bytes: &mut [u8]) {
+        fn is_ascii_lowercase(b: u8) -> bool {
+            match b {
+                b'a'...b'z' => true,
+                _ => false
+            }
+        }
+        for byte in bytes {
+            *byte &= !(0x20 * (is_ascii_lowercase(*byte) as u8))
+        }
+    }
+
+    fn case12_mask_shifted_bool_match_range(bytes: &mut [u8]) {
+        fn is_ascii_lowercase(b: u8) -> bool {
+            match b {
+                b'a'...b'z' => true,
+                _ => false
+            }
+        }
+        for byte in bytes {
+            *byte &= !((is_ascii_lowercase(*byte) as u8) << 5)
+        }
+    }
+
+    fn case13_subtract_shifted_bool_match_range(bytes: &mut [u8]) {
+        fn is_ascii_lowercase(b: u8) -> bool {
+            match b {
+                b'a'...b'z' => true,
+                _ => false
+            }
+        }
+        for byte in bytes {
+            *byte -= (is_ascii_lowercase(*byte) as u8) << 5
+        }
+    }
+
+    fn case14_subtract_multiplied_bool_match_range(bytes: &mut [u8]) {
+        fn is_ascii_lowercase(b: u8) -> bool {
+            match b {
+                b'a'...b'z' => true,
+                _ => false
+            }
+        }
+        for byte in bytes {
+            *byte -= (b'a' - b'A') * is_ascii_lowercase(*byte) as u8
+        }
+    }
+
+    // Each name below becomes a benchmark of `bytes.iter().all(u8::<name>)`
+    // (see the macro's first arm).
+    @iter
+
+    is_ascii,
+    is_ascii_alphabetic,
+    is_ascii_uppercase,
+    is_ascii_lowercase,
+    is_ascii_alphanumeric,
+    is_ascii_digit,
+    is_ascii_hexdigit,
+    is_ascii_punctuation,
+    is_ascii_graphic,
+    is_ascii_whitespace,
+    is_ascii_control,
+}
+
+// Concatenates ten copies of its argument into one string literal at compile time.
+macro_rules! repeat {
+    ($s: expr) => { concat!($s, $s, $s, $s, $s, $s, $s, $s, $s, $s) }
+}
+
+// Benchmark inputs of increasing size; `LONG` is a multi-line passage
+// repeated ten times via `repeat!`.
+const SHORT: &'static str = "Alice's";
+const MEDIUM: &'static str = "Alice's Adventures in Wonderland";
+const LONG: &'static str = repeat!(r#"
+    La Guida di Bragia, a Ballad Opera for the Marionette Theatre (around 1850)
+    Alice's Adventures in Wonderland (1865)
+    Phantasmagoria and Other Poems (1869)
+    Through the Looking-Glass, and What Alice Found There
+        (includes "Jabberwocky" and "The Walrus and the Carpenter") (1871)
+    The Hunting of the Snark (1876)
+    Rhyme? And Reason? (1883) – shares some contents with the 1869 collection,
+        including the long poem "Phantasmagoria"
+    A Tangled Tale (1885)
+    Sylvie and Bruno (1889)
+    Sylvie and Bruno Concluded (1893)
+    Pillow Problems (1893)
+    What the Tortoise Said to Achilles (1895)
+    Three Sunsets and Other Poems (1898)
+    The Manlet (1903)[106]
+"#);
+
+// Byte-indexed table mapping every `u8` to its ASCII upper-case equivalent;
+// every byte other than an ASCII lower-case letter maps to itself.
+// Used by `case02_lookup_table`.
+const ASCII_UPPERCASE_MAP: [u8; 256] = [
+    0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
+    0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
+    0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
+    0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f,
+    b' ', b'!', b'"', b'#', b'$', b'%', b'&', b'\'',
+    b'(', b')', b'*', b'+', b',', b'-', b'.', b'/',
+    b'0', b'1', b'2', b'3', b'4', b'5', b'6', b'7',
+    b'8', b'9', b':', b';', b'<', b'=', b'>', b'?',
+    b'@', b'A', b'B', b'C', b'D', b'E', b'F', b'G',
+    b'H', b'I', b'J', b'K', b'L', b'M', b'N', b'O',
+    b'P', b'Q', b'R', b'S', b'T', b'U', b'V', b'W',
+    b'X', b'Y', b'Z', b'[', b'\\', b']', b'^', b'_',
+    b'`',
+
+    // 0x61..=0x7a: the lower-case letters map to their upper-case counterparts.
+    b'A', b'B', b'C', b'D', b'E', b'F', b'G',
+    b'H', b'I', b'J', b'K', b'L', b'M', b'N', b'O',
+    b'P', b'Q', b'R', b'S', b'T', b'U', b'V', b'W',
+    b'X', b'Y', b'Z',
+
+    b'{', b'|', b'}', b'~', 0x7f,
+    0x80, 0x81, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87,
+    0x88, 0x89, 0x8a, 0x8b, 0x8c, 0x8d, 0x8e, 0x8f,
+    0x90, 0x91, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97,
+    0x98, 0x99, 0x9a, 0x9b, 0x9c, 0x9d, 0x9e, 0x9f,
+    0xa0, 0xa1, 0xa2, 0xa3, 0xa4, 0xa5, 0xa6, 0xa7,
+    0xa8, 0xa9, 0xaa, 0xab, 0xac, 0xad, 0xae, 0xaf,
+    0xb0, 0xb1, 0xb2, 0xb3, 0xb4, 0xb5, 0xb6, 0xb7,
+    0xb8, 0xb9, 0xba, 0xbb, 0xbc, 0xbd, 0xbe, 0xbf,
+    0xc0, 0xc1, 0xc2, 0xc3, 0xc4, 0xc5, 0xc6, 0xc7,
+    0xc8, 0xc9, 0xca, 0xcb, 0xcc, 0xcd, 0xce, 0xcf,
+    0xd0, 0xd1, 0xd2, 0xd3, 0xd4, 0xd5, 0xd6, 0xd7,
+    0xd8, 0xd9, 0xda, 0xdb, 0xdc, 0xdd, 0xde, 0xdf,
+    0xe0, 0xe1, 0xe2, 0xe3, 0xe4, 0xe5, 0xe6, 0xe7,
+    0xe8, 0xe9, 0xea, 0xeb, 0xec, 0xed, 0xee, 0xef,
+    0xf0, 0xf1, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7,
+    0xf8, 0xf9, 0xfa, 0xfb, 0xfc, 0xfd, 0xfe, 0xff,
+];
+
+// Character classes used by the lookup-table variants of `is_ascii_lowercase`
+// (cases 09 and 10); see `ASCII_CHARACTER_CLASS` below.
+enum AsciiCharacterClass {
+    C, // control
+    Cw, // control whitespace
+    W, // whitespace
+    D, // digit
+    L, // lowercase
+    Lx, // lowercase hex digit
+    U, // uppercase
+    Ux, // uppercase hex digit
+    P, // punctuation
+    N, // Non-ASCII
+}
+use self::AsciiCharacterClass::*;
+
+// Class of each byte value, indexed by the byte itself; rows of 16 entries.
+// Every byte >= 0x80 is `N` (non-ASCII).
+static ASCII_CHARACTER_CLASS: [AsciiCharacterClass; 256] = [
+// _0 _1 _2 _3 _4 _5 _6 _7 _8 _9 _a _b _c _d _e _f
+    C, C, C, C, C, C, C, C, C, Cw,Cw,C, Cw,Cw,C, C, // 0_
+    C, C, C, C, C, C, C, C, C, C, C, C, C, C, C, C, // 1_
+    W, P, P, P, P, P, P, P, P, P, P, P, P, P, P, P, // 2_
+    D, D, D, D, D, D, D, D, D, D, P, P, P, P, P, P, // 3_
+    P, Ux,Ux,Ux,Ux,Ux,Ux,U, U, U, U, U, U, U, U, U, // 4_
+    U, U, U, U, U, U, U, U, U, U, U, P, P, P, P, P, // 5_
+    P, Lx,Lx,Lx,Lx,Lx,Lx,L, L, L, L, L, L, L, L, L, // 6_
+    L, L, L, L, L, L, L, L, L, L, L, P, P, P, P, C, // 7_
+    N, N, N, N, N, N, N, N, N, N, N, N, N, N, N, N,
+    N, N, N, N, N, N, N, N, N, N, N, N, N, N, N, N,
+    N, N, N, N, N, N, N, N, N, N, N, N, N, N, N, N,
+    N, N, N, N, N, N, N, N, N, N, N, N, N, N, N, N,
+    N, N, N, N, N, N, N, N, N, N, N, N, N, N, N, N,
+    N, N, N, N, N, N, N, N, N, N, N, N, N, N, N, N,
+    N, N, N, N, N, N, N, N, N, N, N, N, N, N, N, N,
+    N, N, N, N, N, N, N, N, N, N, N, N, N, N, N, N,
+];
extern crate test;
mod any;
+mod ascii;
mod char;
mod hash;
mod iter;
//! Traits for conversions between types.
//!
-//! The traits in this module provide a general way to talk about conversions
-//! from one type to another. They follow the standard Rust conventions of
-//! `as`/`into`/`from`.
+//! The traits in this module provide a way to convert from one type to another type.
+//! Each trait serves a different purpose:
//!
-//! Like many traits, these are often used as bounds for generic functions, to
-//! support arguments of multiple types.
+//! - Implement the [`AsRef`] trait for cheap reference-to-reference conversions
+//! - Implement the [`AsMut`] trait for cheap mutable-to-mutable conversions
+//! - Implement the [`From`] trait for consuming value-to-value conversions
+//! - Implement the [`Into`] trait for consuming value-to-value conversions to types
+//! outside the current crate
+//! - The [`TryFrom`] and [`TryInto`] traits behave like [`From`] and [`Into`],
+//! but should be implemented when the conversion can fail.
//!
-//! - Implement the `As*` traits for reference-to-reference conversions
-//! - Implement the [`Into`] trait when you want to consume the value in the conversion
-//! - The [`From`] trait is the most flexible, useful for value _and_ reference conversions
-//! - The [`TryFrom`] and [`TryInto`] traits behave like [`From`] and [`Into`], but allow for the
-//! conversion to fail
+//! The traits in this module are often used as trait bounds for generic functions so that
+//! arguments of multiple types are supported. See the documentation of each trait for examples.
//!
-//! As a library author, you should prefer implementing [`From<T>`][`From`] or
+//! As a library author, you should always prefer implementing [`From<T>`][`From`] or
//! [`TryFrom<T>`][`TryFrom`] rather than [`Into<U>`][`Into`] or [`TryInto<U>`][`TryInto`],
//! as [`From`] and [`TryFrom`] provide greater flexibility and offer
//! equivalent [`Into`] or [`TryInto`] implementations for free, thanks to a
-//! blanket implementation in the standard library. However, there are some cases
-//! where this is not possible, such as creating conversions into a type defined
-//! outside your library, so implementing [`Into`] instead of [`From`] is
-//! sometimes necessary.
+//! blanket implementation in the standard library. Only implement [`Into`] or [`TryInto`]
+//! when a conversion to a type outside the current crate is required.
//!
//! # Generic Implementations
//!
#[inline]
pub const fn identity<T>(x: T) -> T { x }
-/// A cheap reference-to-reference conversion. Used to convert a value to a
-/// reference value within generic code.
+/// Used to do a cheap reference-to-reference conversion.
///
-/// `AsRef` is very similar to, but serves a slightly different purpose than,
-/// [`Borrow`].
+/// This trait is similar to [`AsMut`] which is used for converting between mutable references.
+/// If you need to do a costly conversion it is better to implement [`From`] with type
+/// `&T` or write a custom function.
///
-/// `AsRef` is to be used when wishing to convert to a reference of another
-/// type.
-/// `Borrow` is more related to the notion of taking the reference. It is
-/// useful when wishing to abstract over the type of reference
-/// (`&T`, `&mut T`) or allow both the referenced and owned type to be treated
-/// in the same manner.
///
-/// The key difference between the two traits is the intention:
+/// `AsRef` is very similar to, but serves a slightly different purpose than [`Borrow`]:
///
/// - Use `AsRef` when the goal is to simply convert into a reference
/// - Use `Borrow` when the goal is related to writing code that is agnostic to
///
/// [`Borrow`]: ../../std/borrow/trait.Borrow.html
///
-/// **Note: this trait must not fail**. If the conversion can fail, use a
+/// **Note: This trait must not fail**. If the conversion can fail, use a
/// dedicated method which returns an [`Option<T>`] or a [`Result<T, E>`].
///
/// [`Option<T>`]: ../../std/option/enum.Option.html
///
/// # Examples
///
-/// Both [`String`] and `&str` implement `AsRef<str>`:
+/// By using trait bounds we can accept arguments of different types as long as they can be
+/// converted to the specified type `T`.
+///
+/// For example: By creating a generic function that takes an `AsRef<str>` we express that we
+/// want to accept all references that can be converted to &str as an argument.
+/// Since both [`String`] and `&str` implement `AsRef<str>` we can accept both as input argument.
///
/// [`String`]: ../../std/string/struct.String.html
///
fn as_ref(&self) -> &T;
}
-/// A cheap, mutable reference-to-mutable reference conversion.
+/// Used to do a cheap mutable-to-mutable reference conversion.
///
-/// This trait is similar to `AsRef` but used for converting between mutable
-/// references.
+/// This trait is similar to [`AsRef`] but used for converting between mutable
+/// references. If you need to do a costly conversion it is better to
+/// implement [`From`] with type `&mut T` or write a custom function.
///
-/// **Note: this trait must not fail**. If the conversion can fail, use a
+/// **Note: This trait must not fail**. If the conversion can fail, use a
/// dedicated method which returns an [`Option<T>`] or a [`Result<T, E>`].
///
/// [`Option<T>`]: ../../std/option/enum.Option.html
///
/// # Examples
///
-/// [`Box<T>`] implements `AsMut<T>`:
-///
-/// [`Box<T>`]: ../../std/boxed/struct.Box.html
-///
+/// Using `AsMut` as trait bound for a generic function we can accept all mutable references
+/// that can be converted to type `&mut T`. Because [`Box<T>`] implements `AsMut<T>`, we can
+/// write a function `add_one` that takes all arguments that can be converted to `&mut u64`,
+/// so it accepts arguments of type `&mut Box<u64>` as well:
/// ```
/// fn add_one<T: AsMut<u64>>(num: &mut T) {
/// *num.as_mut() += 1;
/// add_one(&mut boxed_num);
/// assert_eq!(*boxed_num, 1);
/// ```
-///
+/// [`Box<T>`]: ../../std/boxed/struct.Box.html
///
#[stable(feature = "rust1", since = "1.0.0")]
pub trait AsMut<T: ?Sized> {
fn as_mut(&mut self) -> &mut T;
}
-/// A conversion that consumes `self`, which may or may not be expensive. The
-/// reciprocal of [`From`][From].
+/// A value-to-value conversion that consumes the input value. The
+/// opposite of [`From`].
///
-/// **Note: this trait must not fail**. If the conversion can fail, use
-/// [`TryInto`] or a dedicated method which returns an [`Option<T>`] or a
-/// [`Result<T, E>`].
+/// One should only implement [`Into`] if a conversion to a type outside the current crate is
+/// required. Otherwise one should always prefer implementing [`From`] over [`Into`] because
+/// implementing [`From`] automatically provides one with an implementation of [`Into`] thanks to
+/// the blanket implementation in the standard library. [`From`] cannot do this type of
+/// conversion because of Rust's orphaning rules.
///
-/// Library authors should not directly implement this trait, but should prefer
-/// implementing the [`From`][From] trait, which offers greater flexibility and
-/// provides an equivalent `Into` implementation for free, thanks to a blanket
-/// implementation in the standard library.
+/// **Note: This trait must not fail**. If the conversion can fail, use [`TryInto`].
///
/// # Generic Implementations
///
-/// - [`From<T>`][From]` for U` implies `Into<U> for T`
-/// - [`into`] is reflexive, which means that `Into<T> for T` is implemented
+/// - [`From<T>`]` for U` implies `Into<U> for T`
+/// - [`Into`] is reflexive, which means that `Into<T> for T` is implemented
///
-/// # Implementing `Into`
+/// # Implementing `Into` for conversions to external types
///
-/// There is one exception to implementing `Into`, and it's kind of esoteric.
-/// If the destination type is not part of the current crate, and it uses a
-/// generic variable, then you can't implement `From` directly. For example,
-/// take this crate:
+/// If the destination type is not part of the current crate
+/// then you can't implement [`From`] directly.
+/// For example, take this code:
///
/// ```compile_fail
/// struct Wrapper<T>(Vec<T>);
/// }
/// }
/// ```
-///
-/// To fix this, you can implement `Into` directly:
+/// This will fail to compile because we cannot implement a trait for a type
+/// if both the trait and the type are not defined by the current crate.
+/// This is due to Rust's orphaning rules. To bypass this, you can implement `Into` directly:
///
/// ```
/// struct Wrapper<T>(Vec<T>);
/// }
/// ```
///
-/// This won't always allow the conversion: for example, `try!` and `?`
-/// always use `From`. However, in most cases, people use `Into` to do the
-/// conversions, and this will allow that.
+/// It is important to understand that `Into` does not provide a [`From`] implementation
+/// (as [`From`] does with `Into`). Therefore, you should always try to implement [`From`]
+/// and then fall back to `Into` if [`From`] can't be implemented.
///
-/// In almost all cases, you should try to implement `From`, then fall back
-/// to `Into` if `From` can't be implemented.
+/// Prefer using `Into` over [`From`] when specifying trait bounds on a generic function
+/// to ensure that types that only implement `Into` can be used as well.
///
/// # Examples
///
/// [`String`] implements `Into<Vec<u8>>`:
///
+/// In order to express that we want a generic function to take all arguments that can be
+/// converted to a specified type `T`, we can use a trait bound of `Into<T>`.
+/// For example: The function `is_hello` takes all arguments that can be converted into a
+/// `Vec<u8>`.
+///
/// ```
/// fn is_hello<T: Into<Vec<u8>>>(s: T) {
/// let bytes = b"hello".to_vec();
fn into(self) -> T;
}
-/// Simple and safe type conversions in to `Self`. It is the reciprocal of
-/// `Into`.
+/// Used to do value-to-value conversions while consuming the input value. It is the reciprocal of
+/// [`Into`].
///
-/// This trait is useful when performing error handling as described by
-/// [the book][book] and is closely related to the `?` operator.
+/// One should always prefer implementing [`From`] over [`Into`]
+/// because implementing [`From`] automatically provides one with an implementation of [`Into`]
+/// thanks to the blanket implementation in the standard library.
///
-/// When constructing a function that is capable of failing the return type
-/// will generally be of the form `Result<T, E>`.
+/// Only implement [`Into`] if a conversion to a type outside the current crate is required.
+/// [`From`] cannot do this type of conversion because of Rust's orphaning rules.
+/// See [`Into`] for more details.
///
-/// The `From` trait allows for simplification of error handling by providing a
-/// means of returning a single error type that encapsulates numerous possible
-/// erroneous situations.
+/// Prefer using [`Into`] over using [`From`] when specifying trait bounds on a generic function.
+/// This way, types that directly implement [`Into`] can be used as arguments as well.
///
-/// This trait is not limited to error handling, rather the general case for
-/// this trait would be in any type conversions to have an explicit definition
-/// of how they are performed.
+/// [`From`] is also very useful when performing error handling. When constructing a function
+/// that is capable of failing, the return type will generally be of the form `Result<T, E>`.
+/// The `From` trait simplifies error handling by allowing a function to return a single error type
+/// that encapsulates multiple error types. See the "Examples" section and [the book][book] for more
+/// details.
///
-/// **Note: this trait must not fail**. If the conversion can fail, use
-/// [`TryFrom`] or a dedicated method which returns an [`Option<T>`] or a
-/// [`Result<T, E>`].
+/// **Note: This trait must not fail**. If the conversion can fail, use [`TryFrom`].
///
/// # Generic Implementations
///
-/// - `From<T> for U` implies [`Into<U>`]` for T`
-/// - [`from`] is reflexive, which means that `From<T> for T` is implemented
+/// - [`From<T>`]` for U` implies [`Into<U>`]` for T`
+/// - [`From`] is reflexive, which means that `From<T> for T` is implemented
///
/// # Examples
///
/// [`String`] implements `From<&str>`:
///
+/// An explicit conversion from a `&str` to a `String` is done as follows:
/// ```
/// let string = "hello".to_string();
/// let other_string = String::from("hello");
/// assert_eq!(string, other_string);
/// ```
///
-/// An example usage for error handling:
+/// While performing error handling it is often useful to implement `From` for your own error type.
+/// By converting underlying error types to our own custom error type that encapsulates the
+/// underlying error type, we can return a single error type without losing information on the
+/// underlying cause. The '?' operator automatically converts the underlying error type to our
+/// custom error type by calling `Into<CliError>::into` which is automatically provided when
+/// implementing `From`. The compiler then infers which implementation of `Into` should be used.
///
/// ```
/// use std::fs;
/// [`from`]: trait.From.html#tymethod.from
/// [book]: ../../book/ch09-00-error-handling.html
#[stable(feature = "rust1", since = "1.0.0")]
+#[rustc_on_unimplemented(
+ on(
+ all(_Self="&str", T="std::string::String"),
+ note="to coerce a `{T}` into a `{Self}`, use `&*` as a prefix",
+ )
+)]
pub trait From<T>: Sized {
/// Performs the conversion.
#[stable(feature = "rust1", since = "1.0.0")]
where T: flt2dec::DecodableFloat
{
unsafe {
- let mut buf = MaybeUninit::<[u8; 1024]>::uninitialized(); // enough for f32 and f64
- let mut parts = MaybeUninit::<[flt2dec::Part; 4]>::uninitialized();
+ let mut buf = MaybeUninit::<[u8; 1024]>::uninit(); // enough for f32 and f64
+ let mut parts = MaybeUninit::<[flt2dec::Part; 4]>::uninit();
// FIXME(#53491): Technically, this is calling `get_mut` on an uninitialized
// `MaybeUninit` (here and elsewhere in this file). Revisit this once
// we decided whether that is valid or not.
{
unsafe {
// enough for f32 and f64
- let mut buf = MaybeUninit::<[u8; flt2dec::MAX_SIG_DIGITS]>::uninitialized();
- let mut parts = MaybeUninit::<[flt2dec::Part; 4]>::uninitialized();
+ let mut buf = MaybeUninit::<[u8; flt2dec::MAX_SIG_DIGITS]>::uninit();
+ let mut parts = MaybeUninit::<[flt2dec::Part; 4]>::uninit();
// FIXME(#53491)
let formatted = flt2dec::to_shortest_str(flt2dec::strategy::grisu::format_shortest, *num,
sign, precision, false, buf.get_mut(),
where T: flt2dec::DecodableFloat
{
unsafe {
- let mut buf = MaybeUninit::<[u8; 1024]>::uninitialized(); // enough for f32 and f64
- let mut parts = MaybeUninit::<[flt2dec::Part; 6]>::uninitialized();
+ let mut buf = MaybeUninit::<[u8; 1024]>::uninit(); // enough for f32 and f64
+ let mut parts = MaybeUninit::<[flt2dec::Part; 6]>::uninit();
// FIXME(#53491)
let formatted = flt2dec::to_exact_exp_str(flt2dec::strategy::grisu::format_exact,
*num, sign, precision,
{
unsafe {
// enough for f32 and f64
- let mut buf = MaybeUninit::<[u8; flt2dec::MAX_SIG_DIGITS]>::uninitialized();
- let mut parts = MaybeUninit::<[flt2dec::Part; 6]>::uninitialized();
+ let mut buf = MaybeUninit::<[u8; flt2dec::MAX_SIG_DIGITS]>::uninit();
+ let mut parts = MaybeUninit::<[flt2dec::Part; 6]>::uninit();
// FIXME(#53491)
let formatted = flt2dec::to_shortest_exp_str(flt2dec::strategy::grisu::format_shortest,
*num, sign, (0, 0), upper,
for byte in buf.iter_mut().rev() {
let n = x % base; // Get the current place value.
x = x / base; // Deaccumulate the number.
- byte.set(Self::digit(n.to_u8())); // Store the digit in the buffer.
+ byte.write(Self::digit(n.to_u8())); // Store the digit in the buffer.
curr -= 1;
if x == zero {
// No more digits left to accumulate.
for byte in buf.iter_mut().rev() {
let n = zero - (x % base); // Get the current place value.
x = x / base; // Deaccumulate the number.
- byte.set(Self::digit(n.to_u8())); // Store the digit in the buffer.
+ byte.write(Self::digit(n.to_u8())); // Store the digit in the buffer.
curr -= 1;
if x == zero {
// No more digits left to accumulate.
#[macro_export]
#[unstable(feature = "maybe_uninit_array", issue = "53491")]
macro_rules! uninitialized_array {
- // This `into_initialized` is safe because an array of `MaybeUninit` does not
+ // This `assume_init` is safe because an array of `MaybeUninit` does not
// require initialization.
// FIXME(#49147): Could be replaced by an array initializer, once those can
// be any const expression.
($t:ty; $size:expr) => (unsafe {
- MaybeUninit::<[MaybeUninit<$t>; $size]>::uninitialized().into_initialized()
+ MaybeUninit::<[MaybeUninit<$t>; $size]>::uninit().assume_init()
});
}
/// [copy_no]: ../intrinsics/fn.copy_nonoverlapping.html
/// [`Drop`]: ../ops/trait.Drop.html
#[inline]
-#[rustc_deprecated(since = "2.0.0", reason = "use `mem::MaybeUninit::uninitialized` instead")]
+#[rustc_deprecated(since = "2.0.0", reason = "use `mem::MaybeUninit::uninit` instead")]
#[stable(feature = "rust1", since = "1.0.0")]
pub unsafe fn uninitialized<T>() -> T {
intrinsics::panic_if_uninhabited::<T>();
///
/// let x: &i32 = unsafe { mem::zeroed() }; // undefined behavior!
/// // The equivalent code with `MaybeUninit<&i32>`:
-/// let x: &i32 = unsafe { MaybeUninit::zeroed().into_initialized() }; // undefined behavior!
+/// let x: &i32 = unsafe { MaybeUninit::zeroed().assume_init() }; // undefined behavior!
/// ```
///
/// This is exploited by the compiler for various optimizations, such as eliding
///
/// let b: bool = unsafe { mem::uninitialized() }; // undefined behavior!
/// // The equivalent code with `MaybeUninit<bool>`:
-/// let b: bool = unsafe { MaybeUninit::uninitialized().into_initialized() }; // undefined behavior!
+/// let b: bool = unsafe { MaybeUninit::uninit().assume_init() }; // undefined behavior!
/// ```
///
/// Moreover, uninitialized memory is special in that the compiler knows that
///
/// let x: i32 = unsafe { mem::uninitialized() }; // undefined behavior!
/// // The equivalent code with `MaybeUninit<i32>`:
-/// let x: i32 = unsafe { MaybeUninit::uninitialized().into_initialized() }; // undefined behavior!
+/// let x: i32 = unsafe { MaybeUninit::uninit().assume_init() }; // undefined behavior!
/// ```
/// (Notice that the rules around uninitialized integers are not finalized yet, but
/// until they are, it is advisable to avoid them.)
///
/// // Create an explicitly uninitialized reference. The compiler knows that data inside
/// // a `MaybeUninit<T>` may be invalid, and hence this is not UB:
-/// let mut x = MaybeUninit::<&i32>::uninitialized();
+/// let mut x = MaybeUninit::<&i32>::uninit();
/// // Set it to a valid value.
-/// x.set(&0);
+/// x.write(&0);
/// // Extract the initialized data -- this is only allowed *after* properly
/// // initializing `x`!
-/// let x = unsafe { x.into_initialized() };
+/// let x = unsafe { x.assume_init() };
/// ```
///
/// The compiler then knows to not make any incorrect assumptions or optimizations on this code.
/// It is your responsibility to make sure `T` gets dropped if it got initialized.
#[unstable(feature = "maybe_uninit", issue = "53491")]
#[inline(always)]
- pub const fn uninitialized() -> MaybeUninit<T> {
+ pub const fn uninit() -> MaybeUninit<T> {
MaybeUninit { uninit: () }
}
+ /// Deprecated before stabilization.
+ #[unstable(feature = "maybe_uninit", issue = "53491")]
+ #[inline(always)]
+ // FIXME: still used by stdsimd
+ // #[rustc_deprecated(since = "1.35.0", reason = "use `uninit` instead")]
+ pub const fn uninitialized() -> MaybeUninit<T> {
+ Self::uninit()
+ }
+
/// Creates a new `MaybeUninit<T>` in an uninitialized state, with the memory being
/// filled with `0` bytes. It depends on `T` whether that already makes for
/// proper initialization. For example, `MaybeUninit<usize>::zeroed()` is initialized,
/// use std::mem::MaybeUninit;
///
/// let x = MaybeUninit::<(u8, bool)>::zeroed();
- /// let x = unsafe { x.into_initialized() };
+ /// let x = unsafe { x.assume_init() };
/// assert_eq!(x, (0, false));
/// ```
///
/// enum NotZero { One = 1, Two = 2 };
///
/// let x = MaybeUninit::<(u8, NotZero)>::zeroed();
- /// let x = unsafe { x.into_initialized() };
+ /// let x = unsafe { x.assume_init() };
/// // Inside a pair, we create a `NotZero` that does not have a valid discriminant.
/// // This is undefined behavior.
/// ```
#[unstable(feature = "maybe_uninit", issue = "53491")]
#[inline]
pub fn zeroed() -> MaybeUninit<T> {
- let mut u = MaybeUninit::<T>::uninitialized();
+ let mut u = MaybeUninit::<T>::uninit();
unsafe {
u.as_mut_ptr().write_bytes(0u8, 1);
}
/// reference to the (now safely initialized) contents of `self`.
#[unstable(feature = "maybe_uninit", issue = "53491")]
#[inline(always)]
- pub fn set(&mut self, val: T) -> &mut T {
+ pub fn write(&mut self, val: T) -> &mut T {
unsafe {
self.value = ManuallyDrop::new(val);
self.get_mut()
}
}
+ /// Deprecated before stabilization.
+ #[unstable(feature = "maybe_uninit", issue = "53491")]
+ #[inline(always)]
+ #[rustc_deprecated(since = "1.35.0", reason = "use `write` instead")]
+ pub fn set(&mut self, val: T) -> &mut T {
+ self.write(val)
+ }
+
/// Gets a pointer to the contained value. Reading from this pointer or turning it
/// into a reference is undefined behavior unless the `MaybeUninit<T>` is initialized.
///
/// #![feature(maybe_uninit)]
/// use std::mem::MaybeUninit;
///
- /// let mut x = MaybeUninit::<Vec<u32>>::uninitialized();
+ /// let mut x = MaybeUninit::<Vec<u32>>::uninit();
/// unsafe { x.as_mut_ptr().write(vec![0,1,2]); }
/// // Create a reference into the `MaybeUninit<T>`. This is okay because we initialized it.
/// let x_vec = unsafe { &*x.as_ptr() };
/// #![feature(maybe_uninit)]
/// use std::mem::MaybeUninit;
///
- /// let x = MaybeUninit::<Vec<u32>>::uninitialized();
+ /// let x = MaybeUninit::<Vec<u32>>::uninit();
/// let x_vec = unsafe { &*x.as_ptr() };
/// // We have created a reference to an uninitialized vector! This is undefined behavior.
/// ```
/// #![feature(maybe_uninit)]
/// use std::mem::MaybeUninit;
///
- /// let mut x = MaybeUninit::<Vec<u32>>::uninitialized();
+ /// let mut x = MaybeUninit::<Vec<u32>>::uninit();
/// unsafe { x.as_mut_ptr().write(vec![0,1,2]); }
/// // Create a reference into the `MaybeUninit<Vec<u32>>`.
/// // This is okay because we initialized it.
/// #![feature(maybe_uninit)]
/// use std::mem::MaybeUninit;
///
- /// let mut x = MaybeUninit::<Vec<u32>>::uninitialized();
+ /// let mut x = MaybeUninit::<Vec<u32>>::uninit();
/// let x_vec = unsafe { &mut *x.as_mut_ptr() };
/// // We have created a reference to an uninitialized vector! This is undefined behavior.
/// ```
/// #![feature(maybe_uninit)]
/// use std::mem::MaybeUninit;
///
- /// let mut x = MaybeUninit::<bool>::uninitialized();
+ /// let mut x = MaybeUninit::<bool>::uninit();
/// unsafe { x.as_mut_ptr().write(true); }
- /// let x_init = unsafe { x.into_initialized() };
+ /// let x_init = unsafe { x.assume_init() };
/// assert_eq!(x_init, true);
/// ```
///
/// #![feature(maybe_uninit)]
/// use std::mem::MaybeUninit;
///
- /// let x = MaybeUninit::<Vec<u32>>::uninitialized();
- /// let x_init = unsafe { x.into_initialized() };
+ /// let x = MaybeUninit::<Vec<u32>>::uninit();
+ /// let x_init = unsafe { x.assume_init() };
/// // `x` had not been initialized yet, so this last line caused undefined behavior.
/// ```
#[unstable(feature = "maybe_uninit", issue = "53491")]
#[inline(always)]
- pub unsafe fn into_initialized(self) -> T {
+ pub unsafe fn assume_init(self) -> T {
intrinsics::panic_if_uninhabited::<T>();
ManuallyDrop::into_inner(self.value)
}
+ /// Deprecated before stabilization.
+ #[unstable(feature = "maybe_uninit", issue = "53491")]
+ #[inline(always)]
+ // FIXME: still used by stdsimd
+ // #[rustc_deprecated(since = "1.35.0", reason = "use `assume_init` instead")]
+ pub unsafe fn into_initialized(self) -> T {
+ self.assume_init()
+ }
+
/// Reads the value from the `MaybeUninit<T>` container. The resulting `T` is subject
/// to the usual drop handling.
///
- /// Whenever possible, it is preferrable to use [`into_initialized`] instead, which
+ /// Whenever possible, it is preferable to use [`assume_init`] instead, which
/// prevents duplicating the content of the `MaybeUninit<T>`.
///
/// # Safety
/// behavior.
///
/// Moreover, this leaves a copy of the same data behind in the `MaybeUninit<T>`. When using
- /// multiple copies of the data (by calling `read_initialized` multiple times, or first
- /// calling `read_initialized` and then [`into_initialized`]), it is your responsibility
+ /// multiple copies of the data (by calling `read` multiple times, or first
+ /// calling `read` and then [`assume_init`]), it is your responsibility
/// to ensure that that data may indeed be duplicated.
///
- /// [`into_initialized`]: #method.into_initialized
+ /// [`assume_init`]: #method.assume_init
///
/// # Examples
///
/// #![feature(maybe_uninit)]
/// use std::mem::MaybeUninit;
///
- /// let mut x = MaybeUninit::<u32>::uninitialized();
- /// x.set(13);
- /// let x1 = unsafe { x.read_initialized() };
+ /// let mut x = MaybeUninit::<u32>::uninit();
+ /// x.write(13);
+ /// let x1 = unsafe { x.read() };
/// // `u32` is `Copy`, so we may read multiple times.
- /// let x2 = unsafe { x.read_initialized() };
+ /// let x2 = unsafe { x.read() };
/// assert_eq!(x1, x2);
///
- /// let mut x = MaybeUninit::<Option<Vec<u32>>>::uninitialized();
- /// x.set(None);
- /// let x1 = unsafe { x.read_initialized() };
+ /// let mut x = MaybeUninit::<Option<Vec<u32>>>::uninit();
+ /// x.write(None);
+ /// let x1 = unsafe { x.read() };
/// // Duplicating a `None` value is okay, so we may read multiple times.
- /// let x2 = unsafe { x.read_initialized() };
+ /// let x2 = unsafe { x.read() };
/// assert_eq!(x1, x2);
/// ```
///
/// #![feature(maybe_uninit)]
/// use std::mem::MaybeUninit;
///
- /// let mut x = MaybeUninit::<Option<Vec<u32>>>::uninitialized();
- /// x.set(Some(vec![0,1,2]));
- /// let x1 = unsafe { x.read_initialized() };
- /// let x2 = unsafe { x.read_initialized() };
+ /// let mut x = MaybeUninit::<Option<Vec<u32>>>::uninit();
+ /// x.write(Some(vec![0,1,2]));
+ /// let x1 = unsafe { x.read() };
+ /// let x2 = unsafe { x.read() };
/// // We now created two copies of the same vector, leading to a double-free when
/// // they both get dropped!
/// ```
#[unstable(feature = "maybe_uninit", issue = "53491")]
#[inline(always)]
- pub unsafe fn read_initialized(&self) -> T {
+ pub unsafe fn read(&self) -> T {
intrinsics::panic_if_uninhabited::<T>();
self.as_ptr().read()
}
+ /// Deprecated before stabilization.
+ #[unstable(feature = "maybe_uninit", issue = "53491")]
+ #[inline(always)]
+ #[rustc_deprecated(since = "1.35.0", reason = "use `read` instead")]
+ pub unsafe fn read_initialized(&self) -> T {
+ self.read()
+ }
+
/// Gets a reference to the contained value.
///
/// # Safety
#[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
#[inline]
pub fn to_ascii_uppercase(&self) -> u8 {
- ASCII_UPPERCASE_MAP[*self as usize]
+ // Unset the fifth bit if this is a lowercase letter
+ *self & !((self.is_ascii_lowercase() as u8) << 5)
}
/// Makes a copy of the value in its ASCII lower case equivalent.
#[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
#[inline]
pub fn to_ascii_lowercase(&self) -> u8 {
- ASCII_LOWERCASE_MAP[*self as usize]
+ // Set the fifth bit if this is an uppercase letter
+ *self | ((self.is_ascii_uppercase() as u8) << 5)
}
/// Checks that two values are an ASCII case-insensitive match.
#[stable(feature = "ascii_ctype_on_intrinsics", since = "1.24.0")]
#[inline]
pub fn is_ascii_alphabetic(&self) -> bool {
- if *self >= 0x80 { return false; }
- match ASCII_CHARACTER_CLASS[*self as usize] {
- L | Lx | U | Ux => true,
+ match *self {
+ b'A'...b'Z' | b'a'...b'z' => true,
_ => false
}
}
#[stable(feature = "ascii_ctype_on_intrinsics", since = "1.24.0")]
#[inline]
pub fn is_ascii_uppercase(&self) -> bool {
- if *self >= 0x80 { return false }
- match ASCII_CHARACTER_CLASS[*self as usize] {
- U | Ux => true,
+ match *self {
+ b'A'...b'Z' => true,
_ => false
}
}
#[stable(feature = "ascii_ctype_on_intrinsics", since = "1.24.0")]
#[inline]
pub fn is_ascii_lowercase(&self) -> bool {
- if *self >= 0x80 { return false }
- match ASCII_CHARACTER_CLASS[*self as usize] {
- L | Lx => true,
+ match *self {
+ b'a'...b'z' => true,
_ => false
}
}
#[stable(feature = "ascii_ctype_on_intrinsics", since = "1.24.0")]
#[inline]
pub fn is_ascii_alphanumeric(&self) -> bool {
- if *self >= 0x80 { return false }
- match ASCII_CHARACTER_CLASS[*self as usize] {
- D | L | Lx | U | Ux => true,
+ match *self {
+ b'0'...b'9' | b'A'...b'Z' | b'a'...b'z' => true,
_ => false
}
}
#[stable(feature = "ascii_ctype_on_intrinsics", since = "1.24.0")]
#[inline]
pub fn is_ascii_digit(&self) -> bool {
- if *self >= 0x80 { return false }
- match ASCII_CHARACTER_CLASS[*self as usize] {
- D => true,
+ match *self {
+ b'0'...b'9' => true,
_ => false
}
}
#[stable(feature = "ascii_ctype_on_intrinsics", since = "1.24.0")]
#[inline]
pub fn is_ascii_hexdigit(&self) -> bool {
- if *self >= 0x80 { return false }
- match ASCII_CHARACTER_CLASS[*self as usize] {
- D | Lx | Ux => true,
+ match *self {
+ b'0'...b'9' | b'A'...b'F' | b'a'...b'f' => true,
_ => false
}
}
#[stable(feature = "ascii_ctype_on_intrinsics", since = "1.24.0")]
#[inline]
pub fn is_ascii_punctuation(&self) -> bool {
- if *self >= 0x80 { return false }
- match ASCII_CHARACTER_CLASS[*self as usize] {
- P => true,
+ match *self {
+ b'!'...b'/' | b':'...b'@' | b'['...b'`' | b'{'...b'~' => true,
_ => false
}
}
#[stable(feature = "ascii_ctype_on_intrinsics", since = "1.24.0")]
#[inline]
pub fn is_ascii_graphic(&self) -> bool {
- if *self >= 0x80 { return false; }
- match ASCII_CHARACTER_CLASS[*self as usize] {
- Ux | U | Lx | L | D | P => true,
+ match *self {
+ b'!'...b'~' => true,
_ => false
}
}
#[stable(feature = "ascii_ctype_on_intrinsics", since = "1.24.0")]
#[inline]
pub fn is_ascii_whitespace(&self) -> bool {
- if *self >= 0x80 { return false; }
- match ASCII_CHARACTER_CLASS[*self as usize] {
- Cw | W => true,
+ match *self {
+ b'\t' | b'\n' | b'\x0C' | b'\r' | b' ' => true,
_ => false
}
}
#[stable(feature = "ascii_ctype_on_intrinsics", since = "1.24.0")]
#[inline]
pub fn is_ascii_control(&self) -> bool {
- if *self >= 0x80 { return false; }
- match ASCII_CHARACTER_CLASS[*self as usize] {
- C | Cw => true,
+ match *self {
+ b'\0'...b'\x1F' | b'\x7F' => true,
_ => false
}
}
// Float -> Float
impl_from! { f32, f64, #[stable(feature = "lossless_float_conv", since = "1.6.0")] }
-
-static ASCII_LOWERCASE_MAP: [u8; 256] = [
- 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
- 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
- 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
- 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f,
- b' ', b'!', b'"', b'#', b'$', b'%', b'&', b'\'',
- b'(', b')', b'*', b'+', b',', b'-', b'.', b'/',
- b'0', b'1', b'2', b'3', b'4', b'5', b'6', b'7',
- b'8', b'9', b':', b';', b'<', b'=', b'>', b'?',
- b'@',
-
- b'a', b'b', b'c', b'd', b'e', b'f', b'g',
- b'h', b'i', b'j', b'k', b'l', b'm', b'n', b'o',
- b'p', b'q', b'r', b's', b't', b'u', b'v', b'w',
- b'x', b'y', b'z',
-
- b'[', b'\\', b']', b'^', b'_',
- b'`', b'a', b'b', b'c', b'd', b'e', b'f', b'g',
- b'h', b'i', b'j', b'k', b'l', b'm', b'n', b'o',
- b'p', b'q', b'r', b's', b't', b'u', b'v', b'w',
- b'x', b'y', b'z', b'{', b'|', b'}', b'~', 0x7f,
- 0x80, 0x81, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87,
- 0x88, 0x89, 0x8a, 0x8b, 0x8c, 0x8d, 0x8e, 0x8f,
- 0x90, 0x91, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97,
- 0x98, 0x99, 0x9a, 0x9b, 0x9c, 0x9d, 0x9e, 0x9f,
- 0xa0, 0xa1, 0xa2, 0xa3, 0xa4, 0xa5, 0xa6, 0xa7,
- 0xa8, 0xa9, 0xaa, 0xab, 0xac, 0xad, 0xae, 0xaf,
- 0xb0, 0xb1, 0xb2, 0xb3, 0xb4, 0xb5, 0xb6, 0xb7,
- 0xb8, 0xb9, 0xba, 0xbb, 0xbc, 0xbd, 0xbe, 0xbf,
- 0xc0, 0xc1, 0xc2, 0xc3, 0xc4, 0xc5, 0xc6, 0xc7,
- 0xc8, 0xc9, 0xca, 0xcb, 0xcc, 0xcd, 0xce, 0xcf,
- 0xd0, 0xd1, 0xd2, 0xd3, 0xd4, 0xd5, 0xd6, 0xd7,
- 0xd8, 0xd9, 0xda, 0xdb, 0xdc, 0xdd, 0xde, 0xdf,
- 0xe0, 0xe1, 0xe2, 0xe3, 0xe4, 0xe5, 0xe6, 0xe7,
- 0xe8, 0xe9, 0xea, 0xeb, 0xec, 0xed, 0xee, 0xef,
- 0xf0, 0xf1, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7,
- 0xf8, 0xf9, 0xfa, 0xfb, 0xfc, 0xfd, 0xfe, 0xff,
-];
-
-static ASCII_UPPERCASE_MAP: [u8; 256] = [
- 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
- 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
- 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
- 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f,
- b' ', b'!', b'"', b'#', b'$', b'%', b'&', b'\'',
- b'(', b')', b'*', b'+', b',', b'-', b'.', b'/',
- b'0', b'1', b'2', b'3', b'4', b'5', b'6', b'7',
- b'8', b'9', b':', b';', b'<', b'=', b'>', b'?',
- b'@', b'A', b'B', b'C', b'D', b'E', b'F', b'G',
- b'H', b'I', b'J', b'K', b'L', b'M', b'N', b'O',
- b'P', b'Q', b'R', b'S', b'T', b'U', b'V', b'W',
- b'X', b'Y', b'Z', b'[', b'\\', b']', b'^', b'_',
- b'`',
-
- b'A', b'B', b'C', b'D', b'E', b'F', b'G',
- b'H', b'I', b'J', b'K', b'L', b'M', b'N', b'O',
- b'P', b'Q', b'R', b'S', b'T', b'U', b'V', b'W',
- b'X', b'Y', b'Z',
-
- b'{', b'|', b'}', b'~', 0x7f,
- 0x80, 0x81, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87,
- 0x88, 0x89, 0x8a, 0x8b, 0x8c, 0x8d, 0x8e, 0x8f,
- 0x90, 0x91, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97,
- 0x98, 0x99, 0x9a, 0x9b, 0x9c, 0x9d, 0x9e, 0x9f,
- 0xa0, 0xa1, 0xa2, 0xa3, 0xa4, 0xa5, 0xa6, 0xa7,
- 0xa8, 0xa9, 0xaa, 0xab, 0xac, 0xad, 0xae, 0xaf,
- 0xb0, 0xb1, 0xb2, 0xb3, 0xb4, 0xb5, 0xb6, 0xb7,
- 0xb8, 0xb9, 0xba, 0xbb, 0xbc, 0xbd, 0xbe, 0xbf,
- 0xc0, 0xc1, 0xc2, 0xc3, 0xc4, 0xc5, 0xc6, 0xc7,
- 0xc8, 0xc9, 0xca, 0xcb, 0xcc, 0xcd, 0xce, 0xcf,
- 0xd0, 0xd1, 0xd2, 0xd3, 0xd4, 0xd5, 0xd6, 0xd7,
- 0xd8, 0xd9, 0xda, 0xdb, 0xdc, 0xdd, 0xde, 0xdf,
- 0xe0, 0xe1, 0xe2, 0xe3, 0xe4, 0xe5, 0xe6, 0xe7,
- 0xe8, 0xe9, 0xea, 0xeb, 0xec, 0xed, 0xee, 0xef,
- 0xf0, 0xf1, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7,
- 0xf8, 0xf9, 0xfa, 0xfb, 0xfc, 0xfd, 0xfe, 0xff,
-];
-
-enum AsciiCharacterClass {
- C, // control
- Cw, // control whitespace
- W, // whitespace
- D, // digit
- L, // lowercase
- Lx, // lowercase hex digit
- U, // uppercase
- Ux, // uppercase hex digit
- P, // punctuation
-}
-use self::AsciiCharacterClass::*;
-
-static ASCII_CHARACTER_CLASS: [AsciiCharacterClass; 128] = [
-// _0 _1 _2 _3 _4 _5 _6 _7 _8 _9 _a _b _c _d _e _f
- C, C, C, C, C, C, C, C, C, Cw,Cw,C, Cw,Cw,C, C, // 0_
- C, C, C, C, C, C, C, C, C, C, C, C, C, C, C, C, // 1_
- W, P, P, P, P, P, P, P, P, P, P, P, P, P, P, P, // 2_
- D, D, D, D, D, D, D, D, D, D, P, P, P, P, P, P, // 3_
- P, Ux,Ux,Ux,Ux,Ux,Ux,U, U, U, U, U, U, U, U, U, // 4_
- U, U, U, U, U, U, U, U, U, U, U, P, P, P, P, P, // 5_
- P, Lx,Lx,Lx,Lx,Lx,Lx,L, L, L, L, L, L, L, L, L, // 6_
- L, L, L, L, L, L, L, L, L, L, L, P, P, P, P, C, // 7_
-];
pub unsafe fn swap<T>(x: *mut T, y: *mut T) {
// Give ourselves some scratch space to work with.
// We do not have to worry about drops: `MaybeUninit` does nothing when dropped.
- let mut tmp = MaybeUninit::<T>::uninitialized();
+ let mut tmp = MaybeUninit::<T>::uninit();
// Perform the swap
copy_nonoverlapping(x, tmp.as_mut_ptr(), 1);
while i + block_size <= len {
// Create some uninitialized memory as scratch space
// Declaring `t` here avoids aligning the stack when this loop is unused
- let mut t = mem::MaybeUninit::<Block>::uninitialized();
+ let mut t = mem::MaybeUninit::<Block>::uninit();
let t = t.as_mut_ptr() as *mut u8;
let x = x.add(i);
let y = y.add(i);
if i < len {
// Swap any remaining bytes
- let mut t = mem::MaybeUninit::<UnalignedBlock>::uninitialized();
+ let mut t = mem::MaybeUninit::<UnalignedBlock>::uninit();
let rem = len - i;
let t = t.as_mut_ptr() as *mut u8;
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub unsafe fn read<T>(src: *const T) -> T {
- let mut tmp = MaybeUninit::<T>::uninitialized();
+ let mut tmp = MaybeUninit::<T>::uninit();
copy_nonoverlapping(src, tmp.as_mut_ptr(), 1);
- tmp.into_initialized()
+ tmp.assume_init()
}
/// Reads the value from `src` without moving it. This leaves the
#[inline]
#[stable(feature = "ptr_unaligned", since = "1.17.0")]
pub unsafe fn read_unaligned<T>(src: *const T) -> T {
- let mut tmp = MaybeUninit::<T>::uninitialized();
+ let mut tmp = MaybeUninit::<T>::uninit();
copy_nonoverlapping(src as *const u8,
tmp.as_mut_ptr() as *mut u8,
mem::size_of::<T>());
- tmp.into_initialized()
+ tmp.assume_init()
}
/// Overwrites a memory location with the given value without reading or
/// let other_five_ref = &other_five;
///
/// assert!(five_ref == same_five_ref);
-/// assert!(five_ref == other_five_ref);
-///
/// assert!(ptr::eq(five_ref, same_five_ref));
+///
+/// assert!(five_ref == other_five_ref);
/// assert!(!ptr::eq(five_ref, other_five_ref));
/// ```
+///
+/// Slices are also compared by their length (fat pointers):
+///
+/// ```
+/// let a = [1, 2, 3];
+/// assert!(std::ptr::eq(&a[..3], &a[..3]));
+/// assert!(!std::ptr::eq(&a[..2], &a[..3]));
+/// assert!(!std::ptr::eq(&a[0..2], &a[1..3]));
+/// ```
+///
+/// Traits are also compared by their implementation:
+///
+/// ```
+/// #[repr(transparent)]
+/// struct Wrapper { member: i32 }
+///
+/// trait Trait {}
+/// impl Trait for Wrapper {}
+/// impl Trait for i32 {}
+///
+/// fn main() {
+/// let wrapper = Wrapper { member: 10 };
+///
+/// // Pointers have equal addresses.
+/// assert!(std::ptr::eq(
+/// &wrapper as *const Wrapper as *const u8,
+/// &wrapper.member as *const i32 as *const u8
+/// ));
+///
+/// // Objects have equal addresses, but `Trait` has different implementations.
+/// assert!(!std::ptr::eq(
+/// &wrapper as &dyn Trait,
+/// &wrapper.member as &dyn Trait,
+/// ));
+/// assert!(!std::ptr::eq(
+/// &wrapper as &dyn Trait as *const dyn Trait,
+/// &wrapper.member as &dyn Trait as *const dyn Trait,
+/// ));
+///
+/// // Converting the reference to a `*const u8` compares by address.
+/// assert!(std::ptr::eq(
+/// &wrapper as &dyn Trait as *const dyn Trait as *const u8,
+/// &wrapper.member as &dyn Trait as *const dyn Trait as *const u8,
+/// ));
+/// }
+/// ```
#[stable(feature = "ptr_eq", since = "1.17.0")]
#[inline]
pub fn eq<T: ?Sized>(a: *const T, b: *const T) -> bool {
/// However the pointer may still dangle if it isn't dereferenced.
///
/// Unlike `*mut T`, `NonNull<T>` is covariant over `T`. If this is incorrect
-/// for your use case, you should include some PhantomData in your type to
+/// for your use case, you should include some [`PhantomData`] in your type to
/// provide invariance, such as `PhantomData<Cell<T>>` or `PhantomData<&'a mut T>`.
/// Usually this won't be necessary; covariance is correct for most safe abstractions,
-/// such as Box, Rc, Arc, Vec, and LinkedList. This is the case because they
+/// such as `Box`, `Rc`, `Arc`, `Vec`, and `LinkedList`. This is the case because they
/// provide a public API that follows the normal shared XOR mutable rules of Rust.
///
/// Notice that `NonNull<T>` has a `From` instance for `&T`. However, this does
/// it is your responsibility to ensure that `as_mut` is never called, and `as_ptr`
/// is never used for mutation.
///
+/// [`PhantomData`]: ../marker/struct.PhantomData.html
/// [`UnsafeCell<T>`]: ../cell/struct.UnsafeCell.html
#[stable(feature = "nonnull", since = "1.25.0")]
#[repr(transparent)]
}
}
- let mut rawarray = MaybeUninit::<RawArray<T>>::uninitialized();
+ let mut rawarray = MaybeUninit::<RawArray<T>>::uninit();
let buf = &mut (*rawarray.as_mut_ptr()).typed as *mut [T; 2] as *mut T;
let dim = mid.sub(left).add(right);
/// assert!(Some('ע') == s.trim_left().chars().next());
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_deprecated(reason = "superseded by `trim_start`", since = "1.33.0")]
+ #[rustc_deprecated(
+ since = "1.33.0",
+ reason = "superseded by `trim_start`",
+ suggestion = "trim_start",
+ )]
pub fn trim_left(&self) -> &str {
self.trim_start()
}
/// assert!(Some('ת') == s.trim_right().chars().rev().next());
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_deprecated(reason = "superseded by `trim_end`", since = "1.33.0")]
+ #[rustc_deprecated(
+ since = "1.33.0",
+ reason = "superseded by `trim_end`",
+ suggestion = "trim_end",
+ )]
pub fn trim_right(&self) -> &str {
self.trim_end()
}
/// assert_eq!("12foo1bar12".trim_left_matches(x), "foo1bar12");
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_deprecated(reason = "superseded by `trim_start_matches`", since = "1.33.0")]
+ #[rustc_deprecated(
+ since = "1.33.0",
+ reason = "superseded by `trim_start_matches`",
+ suggestion = "trim_start_matches",
+ )]
pub fn trim_left_matches<'a, P: Pattern<'a>>(&'a self, pat: P) -> &'a str {
self.trim_start_matches(pat)
}
/// assert_eq!("1fooX".trim_right_matches(|c| c == '1' || c == 'X'), "1foo");
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
- #[rustc_deprecated(reason = "superseded by `trim_end_matches`", since = "1.33.0")]
+ #[rustc_deprecated(
+ since = "1.33.0",
+ reason = "superseded by `trim_end_matches`",
+ suggestion = "trim_end_matches",
+ )]
pub fn trim_right_matches<'a, P: Pattern<'a>>(&'a self, pat: P) -> &'a str
where P::Searcher: ReverseSearcher<'a>
{
use std::cmp::Ordering::{Equal, Less, Greater};
+use std::f64::NAN;
#[test]
fn test_clone() {
}
#[test]
-fn test_tuple_cmp() {
+fn test_partial_eq() {
let (small, big) = ((1, 2, 3), (3, 2, 1));
-
- let nan = 0.0f64/0.0;
-
- // PartialEq
assert_eq!(small, small);
assert_eq!(big, big);
- assert!(small != big);
- assert!(big != small);
+ assert_ne!(small, big);
+ assert_ne!(big, small);
+}
+
+#[test]
+fn test_partial_ord() {
+ let (small, big) = ((1, 2, 3), (3, 2, 1));
- // PartialOrd
assert!(small < big);
assert!(!(small < small));
assert!(!(big < small));
assert!(big >= small);
assert!(big >= big);
- assert!(!((1.0f64, 2.0f64) < (nan, 3.0)));
- assert!(!((1.0f64, 2.0f64) <= (nan, 3.0)));
- assert!(!((1.0f64, 2.0f64) > (nan, 3.0)));
- assert!(!((1.0f64, 2.0f64) >= (nan, 3.0)));
- assert!(((1.0f64, 2.0f64) < (2.0, nan)));
- assert!(!((2.0f64, 2.0f64) < (2.0, nan)));
-
- // Ord
- assert!(small.cmp(&small) == Equal);
- assert!(big.cmp(&big) == Equal);
- assert!(small.cmp(&big) == Less);
- assert!(big.cmp(&small) == Greater);
+ assert!(!((1.0f64, 2.0f64) < (NAN, 3.0)));
+ assert!(!((1.0f64, 2.0f64) <= (NAN, 3.0)));
+ assert!(!((1.0f64, 2.0f64) > (NAN, 3.0)));
+ assert!(!((1.0f64, 2.0f64) >= (NAN, 3.0)));
+ assert!(((1.0f64, 2.0f64) < (2.0, NAN)));
+ assert!(!((2.0f64, 2.0f64) < (2.0, NAN)));
+}
+
+#[test]
+fn test_ord() {
+ let (small, big) = ((1, 2, 3), (3, 2, 1));
+ assert_eq!(small.cmp(&small), Equal);
+ assert_eq!(big.cmp(&big), Equal);
+ assert_eq!(small.cmp(&big), Less);
+ assert_eq!(big.cmp(&small), Greater);
}
#[test]
fn end($self: $S::Span) -> LineColumn;
fn join($self: $S::Span, other: $S::Span) -> Option<$S::Span>;
fn resolved_at($self: $S::Span, at: $S::Span) -> $S::Span;
+ fn source_text($self: $S::Span) -> Option<String>;
},
}
};
self.0 == other.0
}
+ /// Returns the source text behind a span. This preserves the original source
+ /// code, including spaces and comments. It only returns a result if the span
+ /// corresponds to real source code.
+ ///
+ /// Note: The observable result of a macro should only rely on the tokens and
+ /// not on this source text. The result of this function is a best effort to
+ /// be used for diagnostics only.
+ #[unstable(feature = "proc_macro_span", issue = "54725")]
+ pub fn source_text(&self) -> Option<String> {
+ self.0.source_text()
+ }
+
diagnostic_method!(error, Level::Error);
diagnostic_method!(warning, Level::Warning);
diagnostic_method!(note, Level::Note);
[] UnsafetyCheckResult(DefId),
[] UnsafeDeriveOnReprPacked(DefId),
+ [] LintMod(DefId),
[] CheckModAttrs(DefId),
[] CheckModLoops(DefId),
[] CheckModUnstableApiUsage(DefId),
&self.forest.krate.attrs
}
- pub fn get_module(&self, module: DefId) -> (&'hir Mod, Span, NodeId)
- {
+ pub fn get_module(&self, module: DefId) -> (&'hir Mod, Span, HirId) {
let node_id = self.as_local_node_id(module).unwrap();
+ let hir_id = self.node_to_hir_id(node_id);
self.read(node_id);
match self.find_entry(node_id).unwrap().node {
Node::Item(&Item {
span,
node: ItemKind::Mod(ref m),
..
- }) => (m, span, node_id),
- Node::Crate => (&self.forest.krate.module, self.forest.krate.span, node_id),
+ }) => (m, span, hir_id),
+ Node::Crate => (&self.forest.krate.module, self.forest.krate.span, hir_id),
_ => panic!("not a module")
}
}
/// corresponding to the Node ID
pub fn attrs(&self, id: NodeId) -> &'hir [ast::Attribute] {
self.read(id); // reveals attributes on the node
- let attrs = match self.find(id) {
+ let attrs = match self.find_entry(id).map(|entry| entry.node) {
Some(Node::Local(l)) => Some(&l.attrs[..]),
Some(Node::Item(i)) => Some(&i.attrs[..]),
Some(Node::ForeignItem(fi)) => Some(&fi.attrs[..]),
// Unit/tuple structs/variants take the attributes straight from
// the struct/variant definition.
Some(Node::Ctor(..)) => return self.attrs(self.get_parent(id)),
+ Some(Node::Crate) => Some(&self.forest.krate.attrs[..]),
_ => None
};
attrs.unwrap_or(&[])
)
}
- CanonicalTyVarKind::Int => self.tcx.mk_int_var(self.next_int_var_id()),
+ CanonicalTyVarKind::Int => self.next_int_var(),
- CanonicalTyVarKind::Float => self.tcx.mk_float_var(self.next_float_var_id()),
+ CanonicalTyVarKind::Float => self.next_float_var(),
};
ty.into()
}
-use crate::infer::type_variable::TypeVariableMap;
-use crate::ty::{self, Ty, TyCtxt};
+use crate::ty::{self, Ty, TyCtxt, TyVid, IntVid, FloatVid, RegionVid};
use crate::ty::fold::{TypeFoldable, TypeFolder};
use super::InferCtxt;
use super::RegionVariableOrigin;
+use super::type_variable::TypeVariableOrigin;
+
+use std::ops::Range;
impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
/// This rather funky routine is used while processing expected
/// from `&[u32; 3]` to `&[u32]` and make the users life more
/// pleasant.
///
- /// The way we do this is using `fudge_regions_if_ok`. What the
+ /// The way we do this is using `fudge_inference_if_ok`. What the
/// routine actually does is to start a snapshot and execute the
/// closure `f`. In our example above, what this closure will do
/// is to unify the expectation (`Option<&[u32]>`) with the actual
/// with `&?a [u32]`, where `?a` is a fresh lifetime variable. The
/// input type (`?T`) is then returned by `f()`.
///
- /// At this point, `fudge_regions_if_ok` will normalize all type
+ /// At this point, `fudge_inference_if_ok` will normalize all type
/// variables, converting `?T` to `&?a [u32]` and end the
/// snapshot. The problem is that we can't just return this type
/// out, because it references the region variable `?a`, and that
/// regions in question are not particularly important. We will
/// use the expected types to guide coercions, but we will still
/// type-check the resulting types from those coercions against
- /// the actual types (`?T`, `Option<?T`) -- and remember that
+ /// the actual types (`?T`, `Option<?T>`) -- and remember that
/// after the snapshot is popped, the variable `?T` is no longer
/// unified.
- pub fn fudge_regions_if_ok<T, E, F>(&self,
- origin: &RegionVariableOrigin,
- f: F) -> Result<T, E> where
+ pub fn fudge_inference_if_ok<T, E, F>(
+ &self,
+ f: F,
+ ) -> Result<T, E> where
F: FnOnce() -> Result<T, E>,
T: TypeFoldable<'tcx>,
{
- debug!("fudge_regions_if_ok(origin={:?})", origin);
+ debug!("fudge_inference_if_ok()");
- let (type_variables, region_vars, value) = self.probe(|snapshot| {
+ let (mut fudger, value) = self.probe(|snapshot| {
match f() {
Ok(value) => {
let value = self.resolve_type_vars_if_possible(&value);
// At this point, `value` could in principle refer
- // to types/regions that have been created during
+ // to inference variables that have been created during
// the snapshot. Once we exit `probe()`, those are
// going to be popped, so we will have to
// eliminate any references to them.
- let type_variables =
- self.type_variables.borrow_mut().types_created_since_snapshot(
- &snapshot.type_snapshot);
- let region_vars =
- self.borrow_region_constraints().vars_created_since_snapshot(
- &snapshot.region_constraints_snapshot);
+ let type_vars = self.type_variables.borrow_mut().vars_since_snapshot(
+ &snapshot.type_snapshot,
+ );
+ let int_vars = self.int_unification_table.borrow_mut().vars_since_snapshot(
+ &snapshot.int_snapshot,
+ );
+ let float_vars = self.float_unification_table.borrow_mut().vars_since_snapshot(
+ &snapshot.float_snapshot,
+ );
+ let region_vars = self.borrow_region_constraints().vars_since_snapshot(
+ &snapshot.region_constraints_snapshot,
+ );
+
+ let fudger = InferenceFudger {
+ infcx: self,
+ type_vars,
+ int_vars,
+ float_vars,
+ region_vars,
+ };
- Ok((type_variables, region_vars, value))
+ Ok((fudger, value))
}
Err(e) => Err(e),
}
// Micro-optimization: if no variables have been created, then
// `value` can't refer to any of them. =) So we can just return it.
- if type_variables.is_empty() && region_vars.is_empty() {
- return Ok(value);
+ if fudger.type_vars.0.is_empty() &&
+ fudger.int_vars.is_empty() &&
+ fudger.float_vars.is_empty() &&
+ fudger.region_vars.0.is_empty() {
+ Ok(value)
+ } else {
+ Ok(value.fold_with(&mut fudger))
}
-
- let mut fudger = RegionFudger {
- infcx: self,
- type_variables: &type_variables,
- region_vars: &region_vars,
- origin,
- };
-
- Ok(value.fold_with(&mut fudger))
}
}
-pub struct RegionFudger<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
+pub struct InferenceFudger<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
- type_variables: &'a TypeVariableMap,
- region_vars: &'a Vec<ty::RegionVid>,
- origin: &'a RegionVariableOrigin,
+ type_vars: (Range<TyVid>, Vec<TypeVariableOrigin>),
+ int_vars: Range<IntVid>,
+ float_vars: Range<FloatVid>,
+ region_vars: (Range<RegionVid>, Vec<RegionVariableOrigin>),
}
-impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for RegionFudger<'a, 'gcx, 'tcx> {
+impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for InferenceFudger<'a, 'gcx, 'tcx> {
fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> {
self.infcx.tcx
}
fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> {
match ty.sty {
ty::Infer(ty::InferTy::TyVar(vid)) => {
- match self.type_variables.get(&vid) {
- None => {
- // This variable was created before the
- // "fudging". Since we refresh all type
- // variables to their binding anyhow, we know
- // that it is unbound, so we can just return
- // it.
- debug_assert!(self.infcx.type_variables.borrow_mut()
- .probe(vid)
- .is_unknown());
- ty
- }
-
- Some(&origin) => {
- // This variable was created during the
- // fudging. Recreate it with a fresh variable
- // here.
- self.infcx.next_ty_var(origin)
- }
+ if self.type_vars.0.contains(&vid) {
+ // This variable was created during the fudging.
+ // Recreate it with a fresh variable here.
+ let idx = (vid.index - self.type_vars.0.start.index) as usize;
+ let origin = self.type_vars.1[idx];
+ self.infcx.next_ty_var(origin)
+ } else {
+ // This variable was created before the
+ // "fudging". Since we refresh all type
+ // variables to their binding anyhow, we know
+ // that it is unbound, so we can just return
+ // it.
+ debug_assert!(self.infcx.type_variables.borrow_mut()
+ .probe(vid)
+ .is_unknown());
+ ty
+ }
+ }
+ ty::Infer(ty::InferTy::IntVar(vid)) => {
+ if self.int_vars.contains(&vid) {
+ self.infcx.next_int_var()
+ } else {
+ ty
+ }
+ }
+ ty::Infer(ty::InferTy::FloatVar(vid)) => {
+ if self.float_vars.contains(&vid) {
+ self.infcx.next_float_var()
+ } else {
+ ty
}
}
_ => ty.super_fold_with(self),
}
fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> {
- match *r {
- ty::ReVar(v) if self.region_vars.contains(&v) => {
- self.infcx.next_region_var(self.origin.clone())
- }
- _ => {
- r
+ if let ty::ReVar(vid) = r {
+ if self.region_vars.0.contains(&vid) {
+ let idx = (vid.index() - self.region_vars.0.start.index()) as usize;
+ let origin = self.region_vars.1[idx];
+ return self.infcx.next_region_var(origin);
}
}
+ r
}
}
self.tcx.mk_ty_var(self.next_ty_var_id(true, origin))
}
- pub fn next_int_var_id(&self) -> IntVid {
+ fn next_int_var_id(&self) -> IntVid {
self.int_unification_table.borrow_mut().new_key(None)
}
- pub fn next_float_var_id(&self) -> FloatVid {
+ pub fn next_int_var(&self) -> Ty<'tcx> {
+ self.tcx.mk_int_var(self.next_int_var_id())
+ }
+
+ fn next_float_var_id(&self) -> FloatVid {
self.float_unification_table.borrow_mut().new_key(None)
}
+ pub fn next_float_var(&self) -> Ty<'tcx> {
+ self.tcx.mk_float_var(self.next_float_var_id())
+ }
+
/// Creates a fresh region variable with the next available index.
/// The variable will be created in the maximum universe created
/// thus far, allowing it to name any region created thus far.
use std::collections::BTreeMap;
use std::{cmp, fmt, mem, u32};
+use std::ops::Range;
mod leak_check;
}
}
- pub fn vars_created_since_snapshot(&self, mark: &RegionSnapshot) -> Vec<RegionVid> {
- self.undo_log[mark.length..]
- .iter()
- .filter_map(|&elt| match elt {
- AddVar(vid) => Some(vid),
- _ => None,
- }).collect()
+ pub fn vars_since_snapshot(
+ &self,
+ mark: &RegionSnapshot,
+ ) -> (Range<RegionVid>, Vec<RegionVariableOrigin>) {
+ let range = self.unification_table.vars_since_snapshot(&mark.region_snapshot);
+ (range.clone(), (range.start.index()..range.end.index()).map(|index| {
+ self.var_infos[ty::RegionVid::from(index)].origin.clone()
+ }).collect())
}
/// See [`RegionInference::region_constraints_added_in_snapshot`].
use syntax::symbol::InternedString;
use syntax_pos::Span;
-use crate::ty::{self, Ty};
+use crate::ty::{self, Ty, TyVid};
use std::cmp;
use std::marker::PhantomData;
use std::u32;
-use rustc_data_structures::fx::FxHashMap;
+use std::ops::Range;
use rustc_data_structures::snapshot_vec as sv;
use rustc_data_structures::unify as ut;
Generalized(ty::TyVid),
}
-pub type TypeVariableMap = FxHashMap<ty::TyVid, TypeVariableOrigin>;
-
struct TypeVariableData {
origin: TypeVariableOrigin,
diverging: bool,
self.sub_relations.commit(sub_snapshot);
}
- /// Returns a map `{V1 -> V2}`, where the keys `{V1}` are
- /// ty-variables created during the snapshot, and the values
- /// `{V2}` are the root variables that they were unified with,
- /// along with their origin.
- pub fn types_created_since_snapshot(&mut self, s: &Snapshot<'tcx>) -> TypeVariableMap {
- let actions_since_snapshot = self.values.actions_since_snapshot(&s.snapshot);
-
- actions_since_snapshot
- .iter()
- .filter_map(|action| match action {
- &sv::UndoLog::NewElem(index) => Some(ty::TyVid { index: index as u32 }),
- _ => None,
- })
- .map(|vid| {
- let origin = self.values.get(vid.index as usize).origin.clone();
- (vid, origin)
- })
- .collect()
+ /// Returns a range of the type variables created during the snapshot.
+ pub fn vars_since_snapshot(
+ &mut self,
+ s: &Snapshot<'tcx>,
+ ) -> (Range<TyVid>, Vec<TypeVariableOrigin>) {
+ let range = self.eq_relations.vars_since_snapshot(&s.eq_snapshot);
+ (range.start.vid..range.end.vid, (range.start.vid.index..range.end.vid.index).map(|index| {
+ self.values.get(index as usize).origin.clone()
+ }).collect())
}
/// Finds the set of type variables that existed *before* `s`
#![feature(non_exhaustive)]
#![feature(proc_macro_internals)]
#![feature(optin_builtin_traits)]
+#![feature(range_is_empty)]
#![feature(refcell_replace_swap)]
#![feature(rustc_diagnostic_macros)]
#![feature(rustc_attrs)]
use crate::session::{config, early_error, Session};
use crate::ty::{self, TyCtxt, Ty};
use crate::ty::layout::{LayoutError, LayoutOf, TyLayout};
+use crate::ty::query::Providers;
use crate::util::nodemap::FxHashMap;
use crate::util::common::time;
use syntax_pos::{MultiSpan, Span, symbol::{LocalInternedString, Symbol}};
use errors::DiagnosticBuilder;
use crate::hir;
-use crate::hir::def_id::LOCAL_CRATE;
+use crate::hir::def_id::{DefId, LOCAL_CRATE};
use crate::hir::intravisit as hir_visit;
+use crate::hir::intravisit::Visitor;
use syntax::util::lev_distance::find_best_match_for_name;
use syntax::visit as ast_visit;
pre_expansion_passes: Option<Vec<EarlyLintPassObject>>,
early_passes: Option<Vec<EarlyLintPassObject>>,
late_passes: Option<Vec<LateLintPassObject>>,
+ late_module_passes: Option<Vec<LateLintPassObject>>,
/// Lints indexed by name.
by_name: FxHashMap<String, TargetLint>,
pre_expansion_passes: Some(vec![]),
early_passes: Some(vec![]),
late_passes: Some(vec![]),
+ late_module_passes: Some(vec![]),
by_name: Default::default(),
future_incompatible: Default::default(),
lint_groups: Default::default(),
pub fn register_late_pass(&mut self,
sess: Option<&Session>,
from_plugin: bool,
+ per_module: bool,
pass: LateLintPassObject) {
self.push_pass(sess, from_plugin, &pass);
- self.late_passes.as_mut().unwrap().push(pass);
+ if per_module {
+ self.late_module_passes.as_mut().unwrap().push(pass);
+ } else {
+ self.late_passes.as_mut().unwrap().push(pass);
+ }
}
// Helper method for register_early/late_pass
pub tcx: TyCtxt<'a, 'tcx, 'tcx>,
/// Side-tables for the body we are in.
+ // FIXME: Make this lazy to avoid running the TypeckTables query?
pub tables: &'a ty::TypeckTables<'tcx>,
/// Parameter environment for the item we are in.
/// Generic type parameters in scope for the item we are in.
pub generics: Option<&'tcx hir::Generics>,
+
+ /// We are only looking at one module
+ only_module: bool,
}
/// Context for lint checking of the AST, after expansion, before lowering to
pub fn current_lint_root(&self) -> hir::HirId {
self.last_node_with_lint_attrs
}
+
+ fn process_mod(&mut self, m: &'tcx hir::Mod, s: Span, n: hir::HirId) {
+ run_lints!(self, check_mod, m, s, n);
+ hir_visit::walk_mod(self, m, n);
+ run_lints!(self, check_mod_post, m, s, n);
+ }
}
impl<'a, 'tcx> LayoutOf for LateContext<'a, 'tcx> {
}
fn visit_mod(&mut self, m: &'tcx hir::Mod, s: Span, n: hir::HirId) {
- run_lints!(self, check_mod, m, s, n);
- hir_visit::walk_mod(self, m, n);
- run_lints!(self, check_mod_post, m, s, n);
+ if !self.only_module {
+ self.process_mod(m, s, n);
+ }
}
fn visit_local(&mut self, l: &'tcx hir::Local) {
}
}
+pub fn lint_mod<'tcx>(tcx: TyCtxt<'_, 'tcx, 'tcx>, module_def_id: DefId) {
+ let access_levels = &tcx.privacy_access_levels(LOCAL_CRATE);
-/// Performs lint checking on a crate.
-///
-/// Consumes the `lint_store` field of the `Session`.
-pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
+ let store = &tcx.sess.lint_store;
+ let passes = store.borrow_mut().late_module_passes.take();
+
+ let mut cx = LateContext {
+ tcx,
+ tables: &ty::TypeckTables::empty(None),
+ param_env: ty::ParamEnv::empty(),
+ access_levels,
+ lint_sess: LintSession {
+ lints: store.borrow(),
+ passes,
+ },
+ last_node_with_lint_attrs: tcx.hir().as_local_hir_id(module_def_id).unwrap(),
+ generics: None,
+ only_module: true,
+ };
+
+ let (module, span, hir_id) = tcx.hir().get_module(module_def_id);
+ cx.process_mod(module, span, hir_id);
+
+ // Visit the crate attributes
+ if hir_id == hir::CRATE_HIR_ID {
+ walk_list!(cx, visit_attribute, cx.tcx.hir().attrs_by_hir_id(hir::CRATE_HIR_ID));
+ }
+
+ // Put the lint store levels and passes back in the session.
+ let passes = cx.lint_sess.passes;
+ drop(cx.lint_sess.lints);
+ store.borrow_mut().late_module_passes = passes;
+}
+
+pub(crate) fn provide(providers: &mut Providers<'_>) {
+ *providers = Providers {
+ lint_mod,
+ ..*providers
+ };
+}
+
+fn lint_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
let access_levels = &tcx.privacy_access_levels(LOCAL_CRATE);
let krate = tcx.hir().krate();
},
last_node_with_lint_attrs: hir::CRATE_HIR_ID,
generics: None,
+ only_module: false,
};
// Visit the whole crate.
tcx.sess.lint_store.borrow_mut().late_passes = passes;
}
+/// Performs lint checking on a crate.
+pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
+ // Run per-module lints
+ for &module in tcx.hir().krate().modules.keys() {
+ tcx.ensure().lint_mod(tcx.hir().local_def_id(module));
+ }
+
+ // Run whole crate non-incremental lints
+ lint_crate(tcx);
+}
+
struct EarlyLintPassObjects<'a> {
lints: &'a mut [EarlyLintPassObject],
}
pub fn provide(providers: &mut Providers<'_>) {
providers.lint_levels = lint_levels;
+ context::provide(providers);
}
/// Returns whether `span` originates in a foreign crate's external macro.
}
}
- /// Returns `None` in case the `AllocId` is dangling. An `EvalContext` can still have a
+ /// Returns `None` in case the `AllocId` is dangling. An `InterpretCx` can still have a
/// local `Allocation` for that `AllocId`, but having such an `AllocId` in a constant is
/// illegal and will likely ICE.
/// This function exists to allow const eval to detect the difference between evaluation-
},
Other {
+ [] fn lint_mod: LintMod(DefId) -> (),
+
/// Checks the attributes in the module
[] fn check_mod_attrs: CheckModAttrs(DefId) -> (),
DepKind::MirBorrowCheck => { force!(mir_borrowck, def_id!()); }
DepKind::UnsafetyCheckResult => { force!(unsafety_check_result, def_id!()); }
DepKind::UnsafeDeriveOnReprPacked => { force!(unsafe_derive_on_repr_packed, def_id!()); }
+ DepKind::LintMod => { force!(lint_mod, def_id!()); }
DepKind::CheckModAttrs => { force!(check_mod_attrs, def_id!()); }
DepKind::CheckModLoops => { force!(check_mod_loops, def_id!()); }
DepKind::CheckModUnstableApiUsage => { force!(check_mod_unstable_api_usage, def_id!()); }
crate-type = ["dylib"]
[dependencies]
-ena = "0.11"
+ena = "0.13"
log = "0.4"
jobserver_crate = { version = "0.1", package = "jobserver" }
lazy_static = "1"
authors = ["The Rust Project Developers"]
name = "rustc_driver"
version = "0.0.0"
+edition = "2018"
[lib]
name = "rustc_driver"
graphviz = { path = "../libgraphviz" }
log = "0.4"
env_logger = { version = "0.5", default-features = false }
-rustc-rayon = "0.1.2"
+rayon = { version = "0.1.2", package = "rustc-rayon" }
scoped-tls = "1.0"
rustc = { path = "../librustc" }
rustc_allocator = { path = "../librustc_allocator" }
rustc_target = { path = "../librustc_target" }
rustc_borrowck = { path = "../librustc_borrowck" }
rustc_data_structures = { path = "../librustc_data_structures" }
-rustc_errors = { path = "../librustc_errors" }
+errors = { path = "../librustc_errors", package = "rustc_errors" }
rustc_incremental = { path = "../librustc_incremental" }
rustc_lint = { path = "../librustc_lint" }
rustc_metadata = { path = "../librustc_metadata" }
#![recursion_limit="256"]
-extern crate arena;
+#![deny(rust_2018_idioms)]
+
pub extern crate getopts;
-extern crate graphviz;
-extern crate env_logger;
#[cfg(unix)]
extern crate libc;
-extern crate rustc_rayon as rayon;
-extern crate rustc;
-extern crate rustc_allocator;
-extern crate rustc_target;
-extern crate rustc_borrowck;
-extern crate rustc_data_structures;
-extern crate rustc_errors as errors;
-extern crate rustc_passes;
-extern crate rustc_lint;
-extern crate rustc_plugin;
-extern crate rustc_privacy;
-extern crate rustc_incremental;
-extern crate rustc_metadata;
-extern crate rustc_mir;
-extern crate rustc_resolve;
-extern crate rustc_save_analysis;
-extern crate rustc_traits;
-extern crate rustc_codegen_utils;
-extern crate rustc_typeck;
-extern crate rustc_interface;
-extern crate scoped_tls;
-extern crate serialize;
-extern crate smallvec;
#[macro_use]
extern crate log;
-extern crate syntax;
-extern crate syntax_ext;
-extern crate syntax_pos;
use pretty::{PpMode, UserIdentifiedItem};
pub use self::PpSourceMode::*;
pub use self::PpMode::*;
use self::NodesMatchingUII::*;
-use abort_on_err;
+use crate::abort_on_err;
-use source_name;
+use crate::source_name;
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum PpSourceMode {
tcx: TyCtxt<'tcx, 'tcx, 'tcx>,
f: F
) -> A
- where F: FnOnce(&dyn HirPrinterSupport, &hir::Crate) -> A
+ where F: FnOnce(&dyn HirPrinterSupport<'_>, &hir::Crate) -> A
{
match *self {
PpmNormal => {
impl<'hir> pprust::PpAnn for NoAnn<'hir> {}
impl<'hir> pprust_hir::PpAnn for NoAnn<'hir> {
- fn nested(&self, state: &mut pprust_hir::State, nested: pprust_hir::Nested)
+ fn nested(&self, state: &mut pprust_hir::State<'_>, nested: pprust_hir::Nested)
-> io::Result<()> {
if let Some(tcx) = self.tcx {
pprust_hir::PpAnn::nested(tcx.hir(), state, nested)
}
impl<'hir> pprust::PpAnn for IdentifiedAnnotation<'hir> {
- fn pre(&self, s: &mut pprust::State, node: pprust::AnnNode) -> io::Result<()> {
+ fn pre(&self, s: &mut pprust::State<'_>, node: pprust::AnnNode<'_>) -> io::Result<()> {
match node {
pprust::AnnNode::Expr(_) => s.popen(),
_ => Ok(()),
}
}
- fn post(&self, s: &mut pprust::State, node: pprust::AnnNode) -> io::Result<()> {
+ fn post(&self, s: &mut pprust::State<'_>, node: pprust::AnnNode<'_>) -> io::Result<()> {
match node {
pprust::AnnNode::Ident(_) |
pprust::AnnNode::Name(_) => Ok(()),
}
impl<'hir> pprust_hir::PpAnn for IdentifiedAnnotation<'hir> {
- fn nested(&self, state: &mut pprust_hir::State, nested: pprust_hir::Nested)
+ fn nested(&self, state: &mut pprust_hir::State<'_>, nested: pprust_hir::Nested)
-> io::Result<()> {
if let Some(ref tcx) = self.tcx {
pprust_hir::PpAnn::nested(tcx.hir(), state, nested)
Ok(())
}
}
- fn pre(&self, s: &mut pprust_hir::State, node: pprust_hir::AnnNode) -> io::Result<()> {
+ fn pre(&self, s: &mut pprust_hir::State<'_>, node: pprust_hir::AnnNode<'_>) -> io::Result<()> {
match node {
pprust_hir::AnnNode::Expr(_) => s.popen(),
_ => Ok(()),
}
}
- fn post(&self, s: &mut pprust_hir::State, node: pprust_hir::AnnNode) -> io::Result<()> {
+ fn post(&self, s: &mut pprust_hir::State<'_>, node: pprust_hir::AnnNode<'_>) -> io::Result<()> {
match node {
pprust_hir::AnnNode::Name(_) => Ok(()),
pprust_hir::AnnNode::Item(item) => {
}
impl<'a> pprust::PpAnn for HygieneAnnotation<'a> {
- fn post(&self, s: &mut pprust::State, node: pprust::AnnNode) -> io::Result<()> {
+ fn post(&self, s: &mut pprust::State<'_>, node: pprust::AnnNode<'_>) -> io::Result<()> {
match node {
pprust::AnnNode::Ident(&ast::Ident { name, span }) => {
s.s.space()?;
}
impl<'a, 'tcx> pprust_hir::PpAnn for TypedAnnotation<'a, 'tcx> {
- fn nested(&self, state: &mut pprust_hir::State, nested: pprust_hir::Nested)
+ fn nested(&self, state: &mut pprust_hir::State<'_>, nested: pprust_hir::Nested)
-> io::Result<()> {
let old_tables = self.tables.get();
if let pprust_hir::Nested::Body(id) = nested {
self.tables.set(old_tables);
Ok(())
}
- fn pre(&self, s: &mut pprust_hir::State, node: pprust_hir::AnnNode) -> io::Result<()> {
+ fn pre(&self, s: &mut pprust_hir::State<'_>, node: pprust_hir::AnnNode<'_>) -> io::Result<()> {
match node {
pprust_hir::AnnNode::Expr(_) => s.popen(),
_ => Ok(()),
}
}
- fn post(&self, s: &mut pprust_hir::State, node: pprust_hir::AnnNode) -> io::Result<()> {
+ fn post(&self, s: &mut pprust_hir::State<'_>, node: pprust_hir::AnnNode<'_>) -> io::Result<()> {
match node {
pprust_hir::AnnNode::Expr(expr) => {
s.s.space()?;
}
}
- fn to_one_node_id(self, user_option: &str, sess: &Session, map: &hir_map::Map) -> ast::NodeId {
+ fn to_one_node_id(self,
+ user_option: &str,
+ sess: &Session,
+ map: &hir_map::Map<'_>)
+ -> ast::NodeId {
let fail_because = |is_wrong_because| -> ast::NodeId {
let message = format!("{} needs NodeId (int) or unique path suffix (b::c::d); got \
{}, which {}",
fn safe_remove_file(p: &Path) -> io::Result<()> {
if p.exists() {
let canonicalized = p.canonicalize()?;
- std_fs::remove_file(canonicalized)
+ match std_fs::remove_file(canonicalized) {
+ Err(ref err) if err.kind() == io::ErrorKind::NotFound => Ok(()),
+ result => result,
+ }
} else {
Ok(())
}
authors = ["The Rust Project Developers"]
name = "rustc_interface"
version = "0.0.0"
+edition = "2018"
[lib]
name = "rustc_interface"
[dependencies]
log = "0.4"
-rustc-rayon = "0.1.1"
+rayon = { version = "0.1.1", package = "rustc-rayon" }
smallvec = { version = "0.6.7", features = ["union", "may_dangle"] }
scoped-tls = "1.0"
syntax = { path = "../libsyntax" }
-use queries::Queries;
+use crate::queries::Queries;
+use crate::util;
+use crate::profile;
+pub use crate::passes::BoxedResolver;
+
use rustc::lint;
use rustc::session::config::{self, Input};
use rustc::session::{DiagnosticOutput, Session};
use std::sync::{Arc, Mutex};
use syntax;
use syntax::source_map::{FileLoader, SourceMap};
-use util;
-use profile;
-
-pub use passes::BoxedResolver;
pub type Result<T> = result::Result<T, ErrorReported>;
#![feature(generators)]
#![cfg_attr(unix, feature(libc))]
+#![deny(rust_2018_idioms)]
+
#![allow(unused_imports)]
#![recursion_limit="256"]
#[cfg(unix)]
extern crate libc;
-#[macro_use]
-extern crate log;
-extern crate rustc;
-extern crate rustc_codegen_utils;
-extern crate rustc_allocator;
-extern crate rustc_borrowck;
-extern crate rustc_incremental;
-extern crate rustc_traits;
-#[macro_use]
-extern crate rustc_data_structures;
-extern crate rustc_errors;
-extern crate rustc_lint;
-extern crate rustc_metadata;
-extern crate rustc_mir;
-extern crate rustc_passes;
-extern crate rustc_plugin;
-extern crate rustc_privacy;
-extern crate rustc_rayon as rayon;
-extern crate rustc_resolve;
-extern crate rustc_typeck;
-extern crate smallvec;
-extern crate serialize;
-extern crate syntax;
-extern crate syntax_pos;
-extern crate syntax_ext;
pub mod interface;
mod passes;
-use interface::{Compiler, Result};
-use util;
-use proc_macro_decls;
+use crate::interface::{Compiler, Result};
+use crate::util;
+use crate::proc_macro_decls;
+use log::{debug, info, warn, log_enabled};
use rustc::dep_graph::DepGraph;
use rustc::hir;
use rustc::hir::lowering::lower_crate;
use rustc_allocator as allocator;
use rustc_borrowck as borrowck;
use rustc_codegen_utils::codegen_backend::CodegenBackend;
+use rustc_data_structures::{box_region_allow_access, declare_box_region_type, parallel};
use rustc_data_structures::fingerprint::Fingerprint;
use rustc_data_structures::stable_hasher::StableHasher;
use rustc_data_structures::sync::{Lrc, ParallelIterator, par_iter};
ls.register_early_pass(Some(sess), true, false, pass);
}
for pass in late_lint_passes {
- ls.register_late_pass(Some(sess), true, pass);
+ ls.register_late_pass(Some(sess), true, false, pass);
}
for (name, (to, deprecated_name)) in lint_groups {
Ok(outputs)
}
-pub fn default_provide(providers: &mut ty::query::Providers) {
+pub fn default_provide(providers: &mut ty::query::Providers<'_>) {
providers.analysis = analysis;
proc_macro_decls::provide(providers);
plugin::build::provide(providers);
lint::provide(providers);
}
-pub fn default_provide_extern(providers: &mut ty::query::Providers) {
+pub fn default_provide_extern(providers: &mut ty::query::Providers<'_>) {
cstore::provide_extern(providers);
}
+use log::debug;
+use rustc::dep_graph::DepNode;
use rustc::session::Session;
use rustc::util::common::{ProfQDumpParams, ProfileQueriesMsg, profq_msg, profq_set_chan};
use std::sync::mpsc::{Receiver};
use std::io::{Write};
-use rustc::dep_graph::{DepNode};
use std::time::{Duration, Instant};
pub mod trace;
-use interface::{Compiler, Result};
-use passes::{self, BoxedResolver, ExpansionResult, BoxedGlobalCtxt, PluginInfo};
+use crate::interface::{Compiler, Result};
+use crate::passes::{self, BoxedResolver, ExpansionResult, BoxedGlobalCtxt, PluginInfo};
+
use rustc_incremental::DepGraphFuture;
use rustc_data_structures::sync::Lrc;
use rustc::session::config::{Input, OutputFilenames, OutputType};
+use log::info;
use rustc::session::config::{Input, OutputFilenames, ErrorOutputType};
use rustc::session::{self, config, early_error, filesearch, Session, DiagnosticOutput};
use rustc::session::CrateDisambiguator;
promoted: None
};
// trigger the query once for all constants since that will already report the errors
+ // FIXME: Use ensure here
let _ = cx.tcx.const_eval(param_env.and(cid));
}
store.register_early_pass(sess, false, true, box BuiltinCombinedEarlyLintPass::new());
}
- late_lint_methods!(declare_combined_late_lint_pass, [BuiltinCombinedLateLintPass, [
+ late_lint_methods!(declare_combined_late_lint_pass, [BuiltinCombinedModuleLateLintPass, [
HardwiredLints: HardwiredLints,
WhileTrue: WhileTrue,
ImproperCTypes: ImproperCTypes,
VariantSizeDifferences: VariantSizeDifferences,
BoxPointers: BoxPointers,
- UnusedAttributes: UnusedAttributes,
PathStatements: PathStatements,
+
+ // Depends on referenced function signatures in expressions
UnusedResults: UnusedResults,
- NonSnakeCase: NonSnakeCase,
+
NonUpperCaseGlobals: NonUpperCaseGlobals,
NonShorthandFieldPatterns: NonShorthandFieldPatterns,
UnusedAllocation: UnusedAllocation,
+
+ // Depends on types used in type definitions
MissingCopyImplementations: MissingCopyImplementations,
- UnstableFeatures: UnstableFeatures,
- InvalidNoMangleItems: InvalidNoMangleItems,
+
PluginAsLibrary: PluginAsLibrary,
+
+ // Depends on referenced function signatures in expressions
MutableTransmutes: MutableTransmutes,
+
+ // Depends on types of fields, checks if they implement Drop
UnionsWithDropFields: UnionsWithDropFields,
- UnreachablePub: UnreachablePub,
- UnnameableTestItems: UnnameableTestItems::new(),
+
TypeAliasBounds: TypeAliasBounds,
- UnusedBrokenConst: UnusedBrokenConst,
+
TrivialConstraints: TrivialConstraints,
TypeLimits: TypeLimits::new(),
+
+ NonSnakeCase: NonSnakeCase,
+ InvalidNoMangleItems: InvalidNoMangleItems,
+
+ // Depends on access levels
+ UnreachablePub: UnreachablePub,
+
+ ExplicitOutlivesRequirements: ExplicitOutlivesRequirements,
+ ]], ['tcx]);
+
+ store.register_late_pass(sess, false, true, box BuiltinCombinedModuleLateLintPass::new());
+
+ late_lint_methods!(declare_combined_late_lint_pass, [BuiltinCombinedLateLintPass, [
+ // FIXME: Look into regression when this is used as a module lint
+ // May depend on constants elsewhere
+ UnusedBrokenConst: UnusedBrokenConst,
+
+ // Uses attr::is_used which is untracked, can't be an incremental module pass.
+ UnusedAttributes: UnusedAttributes,
+
+ // Needs to run after UnusedAttributes as it marks all `feature` attributes as used.
+ UnstableFeatures: UnstableFeatures,
+
+ // Tracks state across modules
+ UnnameableTestItems: UnnameableTestItems::new(),
+
+ // Tracks attributes of parents
MissingDoc: MissingDoc::new(),
+
+ // Depends on access levels
+ // FIXME: Turn the computation of types which implement Debug into a query
+ // and change this to a module lint pass
MissingDebugImplementations: MissingDebugImplementations::new(),
- ExplicitOutlivesRequirements: ExplicitOutlivesRequirements,
]], ['tcx]);
- store.register_late_pass(sess, false, box BuiltinCombinedLateLintPass::new());
+ store.register_late_pass(sess, false, false, box BuiltinCombinedLateLintPass::new());
add_lint_group!(sess,
"nonstandard_style",
}
impl<'a, 'tcx> LateLintPass<'a, 'tcx> for NonSnakeCase {
- fn check_crate(&mut self, cx: &LateContext<'_, '_>, cr: &hir::Crate) {
+ fn check_mod(&mut self, cx: &LateContext<'_, '_>, _: &'tcx hir::Mod, _: Span, id: hir::HirId) {
+ if id != hir::CRATE_HIR_ID {
+ return;
+ }
+
let crate_ident = if let Some(name) = &cx.tcx.sess.opts.crate_name {
Some(Ident::from_str(name))
} else {
- attr::find_by_name(&cr.attrs, "crate_name")
+ attr::find_by_name(&cx.tcx.hir().attrs_by_hir_id(hir::CRATE_HIR_ID), "crate_name")
.and_then(|attr| attr.meta())
.and_then(|meta| {
meta.name_value_literal().and_then(|lit| {
use crate::interpret::{self,
PlaceTy, MPlaceTy, MemPlace, OpTy, ImmTy, Immediate, Scalar, Pointer,
RawConst, ConstValue,
- EvalResult, EvalError, EvalErrorKind, GlobalId, EvalContext, StackPopCleanup,
+ EvalResult, EvalError, EvalErrorKind, GlobalId, InterpretCx, StackPopCleanup,
Allocation, AllocId, MemoryKind,
snapshot, RefTracking,
};
/// Should be a power of two for performance reasons.
const DETECTOR_SNAPSHOT_PERIOD: isize = 256;
-/// The `EvalContext` is only meant to be used to do field and index projections into constants for
+/// The `InterpretCx` is only meant to be used to do field and index projections into constants for
/// `simd_shuffle` and const patterns in match arms.
///
/// The function containing the `match` that is currently being analyzed may have generic bounds
param_env: ty::ParamEnv<'tcx>,
) -> CompileTimeEvalContext<'a, 'mir, 'tcx> {
debug!("mk_eval_cx: {:?}", param_env);
- EvalContext::new(tcx.at(span), param_env, CompileTimeInterpreter::new())
+ InterpretCx::new(tcx.at(span), param_env, CompileTimeInterpreter::new())
}
pub(crate) fn eval_promoted<'a, 'mir, 'tcx>(
// and try improving it down the road when more information is available
let span = tcx.def_span(cid.instance.def_id());
let span = mir.map(|mir| mir.span).unwrap_or(span);
- let mut ecx = EvalContext::new(tcx.at(span), param_env, CompileTimeInterpreter::new());
+ let mut ecx = InterpretCx::new(tcx.at(span), param_env, CompileTimeInterpreter::new());
let r = eval_body_using_ecx(&mut ecx, cid, mir, param_env);
(r, ecx)
}
}
type CompileTimeEvalContext<'a, 'mir, 'tcx> =
- EvalContext<'a, 'mir, 'tcx, CompileTimeInterpreter<'a, 'mir, 'tcx>>;
+ InterpretCx<'a, 'mir, 'tcx, CompileTimeInterpreter<'a, 'mir, 'tcx>>;
impl interpret::MayLeak for ! {
#[inline(always)]
const STATIC_KIND: Option<!> = None; // no copying of statics allowed
#[inline(always)]
- fn enforce_validity(_ecx: &EvalContext<'a, 'mir, 'tcx, Self>) -> bool {
+ fn enforce_validity(_ecx: &InterpretCx<'a, 'mir, 'tcx, Self>) -> bool {
false // for now, we don't enforce validity
}
fn find_fn(
- ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>,
+ ecx: &mut InterpretCx<'a, 'mir, 'tcx, Self>,
instance: ty::Instance<'tcx>,
args: &[OpTy<'tcx>],
dest: Option<PlaceTy<'tcx>>,
}
fn call_intrinsic(
- ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>,
+ ecx: &mut InterpretCx<'a, 'mir, 'tcx, Self>,
instance: ty::Instance<'tcx>,
args: &[OpTy<'tcx>],
dest: PlaceTy<'tcx>,
}
fn ptr_op(
- _ecx: &EvalContext<'a, 'mir, 'tcx, Self>,
+ _ecx: &InterpretCx<'a, 'mir, 'tcx, Self>,
_bin_op: mir::BinOp,
_left: ImmTy<'tcx>,
_right: ImmTy<'tcx>,
}
fn box_alloc(
- _ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>,
+ _ecx: &mut InterpretCx<'a, 'mir, 'tcx, Self>,
_dest: PlaceTy<'tcx>,
) -> EvalResult<'tcx> {
Err(
)
}
- fn before_terminator(ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>) -> EvalResult<'tcx> {
+ fn before_terminator(ecx: &mut InterpretCx<'a, 'mir, 'tcx, Self>) -> EvalResult<'tcx> {
{
let steps = &mut ecx.machine.steps_since_detector_enabled;
#[inline(always)]
fn tag_new_allocation(
- _ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>,
+ _ecx: &mut InterpretCx<'a, 'mir, 'tcx, Self>,
ptr: Pointer,
_kind: MemoryKind<Self::MemoryKinds>,
) -> Pointer {
#[inline(always)]
fn stack_push(
- _ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>,
+ _ecx: &mut InterpretCx<'a, 'mir, 'tcx, Self>,
) -> EvalResult<'tcx> {
Ok(())
}
/// Called immediately before a stack frame gets popped.
#[inline(always)]
fn stack_pop(
- _ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>,
+ _ecx: &mut InterpretCx<'a, 'mir, 'tcx, Self>,
_extra: (),
) -> EvalResult<'tcx> {
Ok(())
}
pub fn error_to_const_error<'a, 'mir, 'tcx>(
- ecx: &EvalContext<'a, 'mir, 'tcx, CompileTimeInterpreter<'a, 'mir, 'tcx>>,
+ ecx: &InterpretCx<'a, 'mir, 'tcx, CompileTimeInterpreter<'a, 'mir, 'tcx>>,
mut error: EvalError<'tcx>
) -> ConstEvalErr<'tcx> {
error.print_backtrace();
use rustc::mir::CastKind;
use rustc_apfloat::Float;
-use super::{EvalContext, Machine, PlaceTy, OpTy, ImmTy, Immediate};
+use super::{InterpretCx, Machine, PlaceTy, OpTy, ImmTy, Immediate};
-impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
+impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> InterpretCx<'a, 'mir, 'tcx, M> {
fn type_is_fat_ptr(&self, ty: Ty<'tcx>) -> bool {
match ty.sty {
ty::RawPtr(ty::TypeAndMut { ty, .. }) |
Memory, Machine
};
-pub struct EvalContext<'a, 'mir, 'tcx: 'a + 'mir, M: Machine<'a, 'mir, 'tcx>> {
+pub struct InterpretCx<'a, 'mir, 'tcx: 'a + 'mir, M: Machine<'a, 'mir, 'tcx>> {
/// Stores the `Machine` instance.
pub machine: M,
}
impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> HasDataLayout
- for EvalContext<'a, 'mir, 'tcx, M>
+ for InterpretCx<'a, 'mir, 'tcx, M>
{
#[inline]
fn data_layout(&self) -> &layout::TargetDataLayout {
}
}
-impl<'a, 'mir, 'tcx, M> layout::HasTyCtxt<'tcx> for EvalContext<'a, 'mir, 'tcx, M>
+impl<'a, 'mir, 'tcx, M> layout::HasTyCtxt<'tcx> for InterpretCx<'a, 'mir, 'tcx, M>
where M: Machine<'a, 'mir, 'tcx>
{
#[inline]
}
impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> LayoutOf
- for EvalContext<'a, 'mir, 'tcx, M>
+ for InterpretCx<'a, 'mir, 'tcx, M>
{
type Ty = Ty<'tcx>;
type TyLayout = EvalResult<'tcx, TyLayout<'tcx>>;
}
}
-impl<'a, 'mir, 'tcx: 'mir, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
+impl<'a, 'mir, 'tcx: 'mir, M: Machine<'a, 'mir, 'tcx>> InterpretCx<'a, 'mir, 'tcx, M> {
pub fn new(
tcx: TyCtxtAt<'a, 'tcx, 'tcx>,
param_env: ty::ParamEnv<'tcx>,
machine: M,
) -> Self {
- EvalContext {
+ InterpretCx {
machine,
tcx,
param_env,
};
use super::{
- Machine, PlaceTy, OpTy, EvalContext,
+ Machine, PlaceTy, OpTy, InterpretCx,
};
Ok(Scalar::from_uint(bits_out, size))
}
-impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
+impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> InterpretCx<'a, 'mir, 'tcx, M> {
/// Returns `true` if emulation happened.
pub fn emulate_intrinsic(
&mut self,
use super::{
Allocation, AllocId, EvalResult, Scalar, AllocationExtra,
- EvalContext, PlaceTy, MPlaceTy, OpTy, ImmTy, Pointer, MemoryKind,
+ InterpretCx, PlaceTy, MPlaceTy, OpTy, ImmTy, Pointer, MemoryKind,
};
/// Whether this kind of memory is allowed to leak
const STATIC_KIND: Option<Self::MemoryKinds>;
/// Whether to enforce the validity invariant
- fn enforce_validity(ecx: &EvalContext<'a, 'mir, 'tcx, Self>) -> bool;
+ fn enforce_validity(ecx: &InterpretCx<'a, 'mir, 'tcx, Self>) -> bool;
/// Called before a basic block terminator is executed.
/// You can use this to detect endlessly running programs.
- fn before_terminator(ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>) -> EvalResult<'tcx>;
+ fn before_terminator(ecx: &mut InterpretCx<'a, 'mir, 'tcx, Self>) -> EvalResult<'tcx>;
/// Entry point to all function calls.
///
/// Passing `dest`and `ret` in the same `Option` proved very annoying when only one of them
/// was used.
fn find_fn(
- ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>,
+ ecx: &mut InterpretCx<'a, 'mir, 'tcx, Self>,
instance: ty::Instance<'tcx>,
args: &[OpTy<'tcx, Self::PointerTag>],
dest: Option<PlaceTy<'tcx, Self::PointerTag>>,
/// Directly process an intrinsic without pushing a stack frame.
/// If this returns successfully, the engine will take care of jumping to the next block.
fn call_intrinsic(
- ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>,
+ ecx: &mut InterpretCx<'a, 'mir, 'tcx, Self>,
instance: ty::Instance<'tcx>,
args: &[OpTy<'tcx, Self::PointerTag>],
dest: PlaceTy<'tcx, Self::PointerTag>,
///
/// Returns a (value, overflowed) pair if the operation succeeded
fn ptr_op(
- ecx: &EvalContext<'a, 'mir, 'tcx, Self>,
+ ecx: &InterpretCx<'a, 'mir, 'tcx, Self>,
bin_op: mir::BinOp,
left: ImmTy<'tcx, Self::PointerTag>,
right: ImmTy<'tcx, Self::PointerTag>,
/// Heap allocations via the `box` keyword.
fn box_alloc(
- ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>,
+ ecx: &mut InterpretCx<'a, 'mir, 'tcx, Self>,
dest: PlaceTy<'tcx, Self::PointerTag>,
) -> EvalResult<'tcx>;
/// Adds the tag for a newly allocated pointer.
fn tag_new_allocation(
- ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>,
+ ecx: &mut InterpretCx<'a, 'mir, 'tcx, Self>,
ptr: Pointer,
kind: MemoryKind<Self::MemoryKinds>,
) -> Pointer<Self::PointerTag>;
/// `mutability` can be `None` in case a raw ptr is being dereferenced.
#[inline]
fn tag_dereference(
- _ecx: &EvalContext<'a, 'mir, 'tcx, Self>,
+ _ecx: &InterpretCx<'a, 'mir, 'tcx, Self>,
place: MPlaceTy<'tcx, Self::PointerTag>,
_mutability: Option<hir::Mutability>,
) -> EvalResult<'tcx, Scalar<Self::PointerTag>> {
/// Executes a retagging operation
#[inline]
fn retag(
- _ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>,
+ _ecx: &mut InterpretCx<'a, 'mir, 'tcx, Self>,
_kind: mir::RetagKind,
_place: PlaceTy<'tcx, Self::PointerTag>,
) -> EvalResult<'tcx> {
/// Called immediately before a new stack frame got pushed
fn stack_push(
- ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>,
+ ecx: &mut InterpretCx<'a, 'mir, 'tcx, Self>,
) -> EvalResult<'tcx, Self::FrameExtra>;
/// Called immediately after a stack frame gets popped
fn stack_pop(
- ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>,
+ ecx: &mut InterpretCx<'a, 'mir, 'tcx, Self>,
extra: Self::FrameExtra,
) -> EvalResult<'tcx>;
}
pub use rustc::mir::interpret::*; // have all the `interpret` symbols in one place: here
pub use self::eval_context::{
- EvalContext, Frame, StackPopCleanup, LocalState, LocalValue,
+ InterpretCx, Frame, StackPopCleanup, LocalState, LocalValue,
};
pub use self::place::{Place, PlaceTy, MemPlace, MPlaceTy};
sign_extend, truncate,
};
use super::{
- EvalContext, Machine,
+ InterpretCx, Machine,
MemPlace, MPlaceTy, PlaceTy, Place, MemoryKind,
};
pub use rustc::mir::interpret::ScalarMaybeUndef;
}
}
-impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
+impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> InterpretCx<'a, 'mir, 'tcx, M> {
/// Try reading an immediate in memory; this is interesting particularly for ScalarPair.
/// Returns `None` if the layout does not permit loading this as a value.
fn try_read_immediate_from_mplace(
use rustc_apfloat::Float;
use rustc::mir::interpret::{EvalResult, Scalar};
-use super::{EvalContext, PlaceTy, Immediate, Machine, ImmTy};
+use super::{InterpretCx, PlaceTy, Immediate, Machine, ImmTy};
-impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
+impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> InterpretCx<'a, 'mir, 'tcx, M> {
/// Applies the binary operation `op` to the two operands and writes a tuple of the result
/// and a boolean signifying the potential overflow to the destination.
pub fn binop_with_overflow(
}
}
-impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
+impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> InterpretCx<'a, 'mir, 'tcx, M> {
fn binary_char_op(
&self,
bin_op: mir::BinOp,
use super::{
GlobalId, AllocId, Allocation, Scalar, EvalResult, Pointer, PointerArithmetic,
- EvalContext, Machine, AllocMap, AllocationExtra,
+ InterpretCx, Machine, AllocMap, AllocationExtra,
RawConst, Immediate, ImmTy, ScalarMaybeUndef, Operand, OpTy, MemoryKind
};
}
// separating the pointer tag for `impl Trait`, see https://github.com/rust-lang/rust/issues/54385
-impl<'a, 'mir, 'tcx, Tag, M> EvalContext<'a, 'mir, 'tcx, M>
+impl<'a, 'mir, 'tcx, Tag, M> InterpretCx<'a, 'mir, 'tcx, M>
where
// FIXME: Working around https://github.com/rust-lang/rust/issues/54385
Tag: ::std::fmt::Debug+Default+Copy+Eq+Hash+'static,
// global table but not in its local memory: It calls back into tcx through
// a query, triggering the CTFE machinery to actually turn this lazy reference
// into a bunch of bytes. IOW, statics are evaluated with CTFE even when
- // this EvalContext uses another Machine (e.g., in miri). This is what we
+ // this InterpretCx uses another Machine (e.g., in miri). This is what we
// want! This way, computing statics works concistently between codegen
// and miri: They use the same query to eventually obtain a `ty::Const`
// and use that for further computation.
-//! This module contains the `EvalContext` methods for executing a single step of the interpreter.
+//! This module contains the `InterpretCx` methods for executing a single step of the interpreter.
//!
//! The main entry point is the `step` method.
use rustc::ty::layout::LayoutOf;
use rustc::mir::interpret::{EvalResult, Scalar, PointerArithmetic};
-use super::{EvalContext, Machine};
+use super::{InterpretCx, Machine};
/// Classify whether an operator is "left-homogeneous", i.e., the LHS has the
/// same type as the result.
}
}
-impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
+impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> InterpretCx<'a, 'mir, 'tcx, M> {
pub fn run(&mut self) -> EvalResult<'tcx> {
while self.step()? {}
Ok(())
use rustc::mir::interpret::{EvalResult, PointerArithmetic, EvalErrorKind, Scalar};
use super::{
- EvalContext, Machine, Immediate, OpTy, ImmTy, PlaceTy, MPlaceTy, StackPopCleanup
+ InterpretCx, Machine, Immediate, OpTy, ImmTy, PlaceTy, MPlaceTy, StackPopCleanup
};
-impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
+impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> InterpretCx<'a, 'mir, 'tcx, M> {
#[inline]
pub fn goto_block(&mut self, target: Option<mir::BasicBlock>) -> EvalResult<'tcx> {
if let Some(target) = target {
use rustc::ty::layout::{Size, Align, LayoutOf};
use rustc::mir::interpret::{Scalar, Pointer, EvalResult, PointerArithmetic};
-use super::{EvalContext, Machine, MemoryKind};
+use super::{InterpretCx, Machine, MemoryKind};
-impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
+impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> InterpretCx<'a, 'mir, 'tcx, M> {
/// Creates a dynamic vtable for the given type and vtable origin. This is used only for
/// objects.
///
};
use super::{
- OpTy, Machine, EvalContext, ValueVisitor, MPlaceTy,
+ OpTy, Machine, InterpretCx, ValueVisitor, MPlaceTy,
};
macro_rules! validation_failure {
path: Vec<PathElem>,
ref_tracking: Option<&'rt mut RefTracking<MPlaceTy<'tcx, M::PointerTag>>>,
const_mode: bool,
- ecx: &'rt EvalContext<'a, 'mir, 'tcx, M>,
+ ecx: &'rt InterpretCx<'a, 'mir, 'tcx, M>,
}
impl<'rt, 'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> ValidityVisitor<'rt, 'a, 'mir, 'tcx, M> {
type V = OpTy<'tcx, M::PointerTag>;
#[inline(always)]
- fn ecx(&self) -> &EvalContext<'a, 'mir, 'tcx, M> {
+ fn ecx(&self) -> &InterpretCx<'a, 'mir, 'tcx, M> {
&self.ecx
}
}
}
-impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
+impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> InterpretCx<'a, 'mir, 'tcx, M> {
/// This function checks the data at `op`. `op` is assumed to cover valid memory if it
/// is an indirect operand.
/// It will error if the bits at the destination do not match the ones described by the layout.
};
use super::{
- Machine, EvalContext, MPlaceTy, OpTy,
+ Machine, InterpretCx, MPlaceTy, OpTy,
};
// A thing that we can project into, and that has a layout.
/// Makes this into an `OpTy`.
fn to_op(
self,
- ecx: &EvalContext<'a, 'mir, 'tcx, M>,
+ ecx: &InterpretCx<'a, 'mir, 'tcx, M>,
) -> EvalResult<'tcx, OpTy<'tcx, M::PointerTag>>;
/// Creates this from an `MPlaceTy`.
/// Projects to the given enum variant.
fn project_downcast(
self,
- ecx: &EvalContext<'a, 'mir, 'tcx, M>,
+ ecx: &InterpretCx<'a, 'mir, 'tcx, M>,
variant: VariantIdx,
) -> EvalResult<'tcx, Self>;
/// Projects to the n-th field.
fn project_field(
self,
- ecx: &EvalContext<'a, 'mir, 'tcx, M>,
+ ecx: &InterpretCx<'a, 'mir, 'tcx, M>,
field: u64,
) -> EvalResult<'tcx, Self>;
}
#[inline(always)]
fn to_op(
self,
- _ecx: &EvalContext<'a, 'mir, 'tcx, M>,
+ _ecx: &InterpretCx<'a, 'mir, 'tcx, M>,
) -> EvalResult<'tcx, OpTy<'tcx, M::PointerTag>> {
Ok(self)
}
#[inline(always)]
fn project_downcast(
self,
- ecx: &EvalContext<'a, 'mir, 'tcx, M>,
+ ecx: &InterpretCx<'a, 'mir, 'tcx, M>,
variant: VariantIdx,
) -> EvalResult<'tcx, Self> {
ecx.operand_downcast(self, variant)
#[inline(always)]
fn project_field(
self,
- ecx: &EvalContext<'a, 'mir, 'tcx, M>,
+ ecx: &InterpretCx<'a, 'mir, 'tcx, M>,
field: u64,
) -> EvalResult<'tcx, Self> {
ecx.operand_field(self, field)
#[inline(always)]
fn to_op(
self,
- _ecx: &EvalContext<'a, 'mir, 'tcx, M>,
+ _ecx: &InterpretCx<'a, 'mir, 'tcx, M>,
) -> EvalResult<'tcx, OpTy<'tcx, M::PointerTag>> {
Ok(self.into())
}
#[inline(always)]
fn project_downcast(
self,
- ecx: &EvalContext<'a, 'mir, 'tcx, M>,
+ ecx: &InterpretCx<'a, 'mir, 'tcx, M>,
variant: VariantIdx,
) -> EvalResult<'tcx, Self> {
ecx.mplace_downcast(self, variant)
#[inline(always)]
fn project_field(
self,
- ecx: &EvalContext<'a, 'mir, 'tcx, M>,
+ ecx: &InterpretCx<'a, 'mir, 'tcx, M>,
field: u64,
) -> EvalResult<'tcx, Self> {
ecx.mplace_field(self, field)
pub trait $visitor_trait_name<'a, 'mir, 'tcx: 'mir+'a, M: Machine<'a, 'mir, 'tcx>>: Sized {
type V: Value<'a, 'mir, 'tcx, M>;
- /// The visitor must have an `EvalContext` in it.
+ /// The visitor must have an `InterpretCx` in it.
fn ecx(&$($mutability)? self)
- -> &$($mutability)? EvalContext<'a, 'mir, 'tcx, M>;
+ -> &$($mutability)? InterpretCx<'a, 'mir, 'tcx, M>;
// Recursive actions, ready to be overloaded.
/// Visits the given value, dispatching as appropriate to more specialized visitors.
HasTyCtxt, TargetDataLayout, HasDataLayout,
};
-use crate::interpret::{EvalContext, ScalarMaybeUndef, Immediate, OpTy, ImmTy, MemoryKind};
+use crate::interpret::{InterpretCx, ScalarMaybeUndef, Immediate, OpTy, ImmTy, MemoryKind};
use crate::const_eval::{
CompileTimeInterpreter, error_to_const_error, eval_promoted, mk_eval_cx,
};
/// Finds optimization opportunities on the MIR.
struct ConstPropagator<'a, 'mir, 'tcx:'a+'mir> {
- ecx: EvalContext<'a, 'mir, 'tcx, CompileTimeInterpreter<'a, 'mir, 'tcx>>,
+ ecx: InterpretCx<'a, 'mir, 'tcx, CompileTimeInterpreter<'a, 'mir, 'tcx>>,
mir: &'mir Mir<'tcx>,
tcx: TyCtxt<'a, 'tcx, 'tcx>,
source: MirSource<'tcx>,
current_item: hir::DUMMY_HIR_ID,
empty_tables: &empty_tables,
};
- let (module, span, node_id) = tcx.hir().get_module(module_def_id);
- let hir_id = tcx.hir().node_to_hir_id(node_id);
+ let (module, span, hir_id) = tcx.hir().get_module(module_def_id);
intravisit::walk_mod(&mut visitor, module, hir_id);
// Check privacy of explicitly written types and traits as well as
augment_error(&mut db);
}
- db.emit();
+ if expression.filter(|e| fcx.is_assign_to_bool(e, expected)).is_some() {
+ // Error reported in `check_assign` so avoid emitting error again.
+ db.delay_as_bug();
+ } else {
+ db.emit();
+ }
self.final_ty = Some(fcx.tcx.types.err);
}
let expr_ty = self.resolve_type_vars_with_obligations(checked_ty);
let mut err = self.report_mismatched_types(&cause, expected, expr_ty, e);
- // If the expected type is an enum (Issue #55250) with any variants whose
- // sole field is of the found type, suggest such variants. (Issue #42764)
- if let ty::Adt(expected_adt, substs) = expected.sty {
- if expected_adt.is_enum() {
- let mut compatible_variants = expected_adt.variants
- .iter()
- .filter(|variant| variant.fields.len() == 1)
- .filter_map(|variant| {
- let sole_field = &variant.fields[0];
- let sole_field_ty = sole_field.ty(self.tcx, substs);
- if self.can_coerce(expr_ty, sole_field_ty) {
- let variant_path = self.tcx.def_path_str(variant.def_id);
- // FIXME #56861: DRYer prelude filtering
- Some(variant_path.trim_start_matches("std::prelude::v1::").to_string())
- } else {
- None
- }
- }).peekable();
-
- if compatible_variants.peek().is_some() {
- let expr_text = print::to_string(print::NO_ANN, |s| s.print_expr(expr));
- let suggestions = compatible_variants
- .map(|v| format!("{}({})", v, expr_text));
- err.span_suggestions(
- expr.span,
- "try using a variant of the expected type",
- suggestions,
- Applicability::MaybeIncorrect,
- );
- }
- }
+ if self.is_assign_to_bool(expr, expected) {
+ // Error reported in `check_assign` so avoid emitting error again.
+ err.delay_as_bug();
+ return (expected, None)
}
+ self.suggest_compatible_variants(&mut err, expr, expected, expr_ty);
self.suggest_ref_or_into(&mut err, expr, expected, expr_ty);
(expected, Some(err))
}
+ /// Returns whether the expected type is `bool` and the expression is `x = y`.
+ pub fn is_assign_to_bool(&self, expr: &hir::Expr, expected: Ty<'tcx>) -> bool {
+ if let hir::ExprKind::Assign(..) = expr.node {
+ return expected == self.tcx.types.bool;
+ }
+ false
+ }
+
+ /// If the expected type is an enum (Issue #55250) with any variants whose
+ /// sole field is of the found type, suggest such variants. (Issue #42764)
+ fn suggest_compatible_variants(
+ &self,
+ err: &mut DiagnosticBuilder<'_>,
+ expr: &hir::Expr,
+ expected: Ty<'tcx>,
+ expr_ty: Ty<'tcx>,
+ ) {
+ if let ty::Adt(expected_adt, substs) = expected.sty {
+ if !expected_adt.is_enum() {
+ return;
+ }
+
+ let mut compatible_variants = expected_adt.variants
+ .iter()
+ .filter(|variant| variant.fields.len() == 1)
+ .filter_map(|variant| {
+ let sole_field = &variant.fields[0];
+ let sole_field_ty = sole_field.ty(self.tcx, substs);
+ if self.can_coerce(expr_ty, sole_field_ty) {
+ let variant_path = self.tcx.def_path_str(variant.def_id);
+ // FIXME #56861: DRYer prelude filtering
+ Some(variant_path.trim_start_matches("std::prelude::v1::").to_string())
+ } else {
+ None
+ }
+ }).peekable();
+
+ if compatible_variants.peek().is_some() {
+ let expr_text = print::to_string(print::NO_ANN, |s| s.print_expr(expr));
+ let suggestions = compatible_variants
+ .map(|v| format!("{}({})", v, expr_text));
+ let msg = "try using a variant of the expected type";
+ err.span_suggestions(expr.span, msg, suggestions, Applicability::MaybeIncorrect);
+ }
+ }
+ }
+
pub fn get_conversion_methods(&self, span: Span, expected: Ty<'tcx>, checked_ty: Ty<'tcx>)
-> Vec<AssociatedItem> {
let mut methods = self.probe_for_return_type(span,
use rustc::hir::itemlikevisit::ItemLikeVisitor;
use crate::middle::lang_items;
use crate::namespace::Namespace;
-use rustc::infer::{self, InferCtxt, InferOk, InferResult, RegionVariableOrigin};
+use rustc::infer::{self, InferCtxt, InferOk, InferResult};
use rustc::infer::canonical::{Canonical, OriginalQueryValues, QueryResponse};
use rustc_data_structures::indexed_vec::Idx;
use rustc_data_structures::sync::Lrc;
/// We know nothing about what type this expression should have.
NoExpectation,
- /// This expression is an `if` condition, it must resolve to `bool`.
- ExpectIfCondition,
-
/// This expression should have the type given (or some subtype).
ExpectHasType(Ty<'tcx>),
fn resolve(self, fcx: &FnCtxt<'a, 'gcx, 'tcx>) -> Expectation<'tcx> {
match self {
NoExpectation => NoExpectation,
- ExpectIfCondition => ExpectIfCondition,
ExpectCastableToType(t) => {
ExpectCastableToType(fcx.resolve_type_vars_if_possible(&t))
}
fn to_option(self, fcx: &FnCtxt<'a, 'gcx, 'tcx>) -> Option<Ty<'tcx>> {
match self.resolve(fcx) {
NoExpectation => None,
- ExpectIfCondition => Some(fcx.tcx.types.bool),
ExpectCastableToType(ty) |
ExpectHasType(ty) |
ExpectRvalueLikeUnsized(ty) => Some(ty),
fn only_has_type(self, fcx: &FnCtxt<'a, 'gcx, 'tcx>) -> Option<Ty<'tcx>> {
match self.resolve(fcx) {
ExpectHasType(ty) => Some(ty),
- ExpectIfCondition => Some(fcx.tcx.types.bool),
NoExpectation | ExpectCastableToType(_) | ExpectRvalueLikeUnsized(_) => None,
}
}
_ => None
}
});
- opt_ty.unwrap_or_else(
- || tcx.mk_int_var(self.next_int_var_id()))
+ opt_ty.unwrap_or_else(|| self.next_int_var())
}
ast::LitKind::Float(_, t) => tcx.mk_mach_float(t),
ast::LitKind::FloatUnsuffixed(_) => {
_ => None
}
});
- opt_ty.unwrap_or_else(
- || tcx.mk_float_var(self.next_float_var_id()))
+ opt_ty.unwrap_or_else(|| self.next_float_var())
}
ast::LitKind::Bool(_) => tcx.types.bool,
ast::LitKind::Err(_) => tcx.types.err,
}
if let Some(mut err) = self.demand_suptype_diag(expr.span, expected_ty, ty) {
- // Add help to type error if this is an `if` condition with an assignment.
- if let (ExpectIfCondition, &ExprKind::Assign(ref lhs, ref rhs))
- = (expected, &expr.node)
- {
- let msg = "try comparing for equality";
- if let (Ok(left), Ok(right)) = (
- self.tcx.sess.source_map().span_to_snippet(lhs.span),
- self.tcx.sess.source_map().span_to_snippet(rhs.span))
- {
- err.span_suggestion(
- expr.span,
- msg,
- format!("{} == {}", left, right),
- Applicability::MaybeIncorrect);
- } else {
- err.help(msg);
- }
+ if self.is_assign_to_bool(expr, expected_ty) {
+ // Error reported in `check_assign` so avoid emitting error again.
+ // FIXME(centril): Consider removing if/when `if` desugars to `match`.
+ err.delay_as_bug();
+ } else {
+ err.emit();
}
- err.emit();
}
ty
}
Some(ret) => ret,
None => return Vec::new()
};
- let expect_args = self.fudge_regions_if_ok(&RegionVariableOrigin::Coercion(call_span), || {
+ let expect_args = self.fudge_inference_if_ok(|| {
// Attempt to apply a subtyping relationship between the formal
// return type (likely containing type variables if the function
// is polymorphic) and the expected return type.
opt_else_expr: Option<&'gcx hir::Expr>,
sp: Span,
expected: Expectation<'tcx>) -> Ty<'tcx> {
- let cond_ty = self.check_expr_meets_expectation_or_error(cond_expr, ExpectIfCondition);
+ let cond_ty = self.check_expr_has_type_or_error(cond_expr, self.tcx.types.bool);
let cond_diverges = self.diverges.get();
self.diverges.set(Diverges::Maybe);
tcx.types.never
}
ExprKind::Assign(ref lhs, ref rhs) => {
- let lhs_ty = self.check_expr_with_needs(&lhs, Needs::MutPlace);
-
- let rhs_ty = self.check_expr_coercable_to_type(&rhs, lhs_ty);
-
- match expected {
- ExpectIfCondition => {
- self.tcx.sess.delay_span_bug(lhs.span, "invalid lhs expression in if;\
- expected error elsehwere");
- }
- _ => {
- // Only check this if not in an `if` condition, as the
- // mistyped comparison help is more appropriate.
- if !lhs.is_place_expr() {
- struct_span_err!(self.tcx.sess, expr.span, E0070,
- "invalid left-hand side expression")
- .span_label(expr.span, "left-hand of expression not valid")
- .emit();
- }
- }
- }
-
- self.require_type_is_sized(lhs_ty, lhs.span, traits::AssignmentLhsSized);
-
- if lhs_ty.references_error() || rhs_ty.references_error() {
- tcx.types.err
- } else {
- tcx.mk_unit()
- }
+ self.check_assign(expr, expected, lhs, rhs)
}
ExprKind::If(ref cond, ref then_expr, ref opt_else_expr) => {
self.check_then_else(&cond, then_expr, opt_else_expr.as_ref().map(|e| &**e),
}
}
+ /// Type check assignment expression `expr` of form `lhs = rhs`.
+ /// The expected type is `()` and is passed to the function for the purposes of diagnostics.
+ fn check_assign(
+ &self,
+ expr: &'gcx hir::Expr,
+ expected: Expectation<'tcx>,
+ lhs: &'gcx hir::Expr,
+ rhs: &'gcx hir::Expr,
+ ) -> Ty<'tcx> {
+ let lhs_ty = self.check_expr_with_needs(&lhs, Needs::MutPlace);
+ let rhs_ty = self.check_expr_coercable_to_type(&rhs, lhs_ty);
+
+ let expected_ty = expected.coercion_target_type(self, expr.span);
+ if expected_ty == self.tcx.types.bool {
+ // The expected type is `bool` but this will result in `()` so we can reasonably
+ // say that the user intended to write `lhs == rhs` instead of `lhs = rhs`.
+ // The likely cause of this is `if foo = bar { .. }`.
+ let actual_ty = self.tcx.mk_unit();
+ let mut err = self.demand_suptype_diag(expr.span, expected_ty, actual_ty).unwrap();
+ let msg = "try comparing for equality";
+ let left = self.tcx.sess.source_map().span_to_snippet(lhs.span);
+ let right = self.tcx.sess.source_map().span_to_snippet(rhs.span);
+ if let (Ok(left), Ok(right)) = (left, right) {
+ let help = format!("{} == {}", left, right);
+ err.span_suggestion(expr.span, msg, help, Applicability::MaybeIncorrect);
+ } else {
+ err.help(msg);
+ }
+ err.emit();
+ } else if !lhs.is_place_expr() {
+ struct_span_err!(self.tcx.sess, expr.span, E0070,
+ "invalid left-hand side expression")
+ .span_label(expr.span, "left-hand of expression not valid")
+ .emit();
+ }
+
+ self.require_type_is_sized(lhs_ty, lhs.span, traits::AssignmentLhsSized);
+
+ if lhs_ty.references_error() || rhs_ty.references_error() {
+ self.tcx.types.err
+ } else {
+ self.tcx.mk_unit()
+ }
+ }
+
// Finish resolving a path in a struct expression or pattern `S::A { .. }` if necessary.
// The newly resolved definition is written into `type_dependent_defs`.
fn finish_resolving_struct_path(&self,
}
fn type_of<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> Ty<'tcx> {
+ checked_type_of(tcx, def_id, true).unwrap()
+}
+
+/// Same as [`type_of`] but returns [`Option`] instead of failing.
+///
+/// If you want to fail anyway, you can set the `fail` parameter to true, but in this case,
+/// you'd better just call [`type_of`] directly.
+pub fn checked_type_of<'a, 'tcx>(
+ tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ def_id: DefId,
+ fail: bool,
+) -> Option<Ty<'tcx>> {
use rustc::hir::*;
- let hir_id = tcx.hir().as_local_hir_id(def_id).unwrap();
+ let hir_id = match tcx.hir().as_local_hir_id(def_id) {
+ Some(hir_id) => hir_id,
+ None => {
+ if !fail {
+ return None;
+ }
+ bug!("invalid node");
+ }
+ };
let icx = ItemCtxt::new(tcx, def_id);
- match tcx.hir().get_by_hir_id(hir_id) {
+ Some(match tcx.hir().get_by_hir_id(hir_id) {
Node::TraitItem(item) => match item.node {
TraitItemKind::Method(..) => {
let substs = InternalSubsts::identity_for_item(tcx, def_id);
}
TraitItemKind::Const(ref ty, _) | TraitItemKind::Type(_, Some(ref ty)) => icx.to_ty(ty),
TraitItemKind::Type(_, None) => {
+ if !fail {
+ return None;
+ }
span_bug!(item.span, "associated type missing default");
}
},
| ItemKind::GlobalAsm(..)
| ItemKind::ExternCrate(..)
| ItemKind::Use(..) => {
+ if !fail {
+ return None;
+ }
span_bug!(
item.span,
"compute_type_of_item: unexpected item type: {:?}",
..
}) => {
if gen.is_some() {
- return tcx.typeck_tables_of(def_id).node_type(hir_id);
+ return Some(tcx.typeck_tables_of(def_id).node_type(hir_id));
}
let substs = ty::ClosureSubsts {
}
// Sanity check to make sure everything is as expected.
if !found_const {
+ if !fail {
+ return None;
+ }
bug!("no arg matching AnonConst in path")
}
match path.def {
for param in &generics.params {
if let ty::GenericParamDefKind::Const = param.kind {
if param_index == arg_index {
- return tcx.type_of(param.def_id);
+ return Some(tcx.type_of(param.def_id));
}
param_index += 1;
}
}
// This is no generic parameter associated with the arg. This is
// probably from an extra arg where one is not needed.
- return tcx.types.err;
+ return Some(tcx.types.err);
}
Def::Err => tcx.types.err,
- x => bug!("unexpected const parent path def {:?}", x),
+ x => {
+ if !fail {
+ return None;
+ }
+ bug!("unexpected const parent path def {:?}", x);
+ }
+ }
+ }
+ x => {
+ if !fail {
+ return None;
}
+ bug!("unexpected const parent path {:?}", x);
}
- x => bug!("unexpected const parent path {:?}", x),
}
}
x => {
+ if !fail {
+ return None;
+ }
bug!("unexpected const parent in type_of_def_id(): {:?}", x);
}
}
hir::GenericParamKind::Const { ref ty, .. } => {
icx.to_ty(ty)
}
- x => bug!("unexpected non-type Node::GenericParam: {:?}", x),
+ x => {
+ if !fail {
+ return None;
+ }
+ bug!("unexpected non-type Node::GenericParam: {:?}", x)
+ },
},
x => {
+ if !fail {
+ return None;
+ }
bug!("unexpected sort of node in type_of_def_id(): {:?}", x);
}
- }
+ })
}
fn find_existential_constraints<'a, 'tcx>(
use std::iter;
+pub use collect::checked_type_of;
+
pub struct TypeAndSubsts<'tcx> {
substs: SubstsRef<'tcx>,
ty: Ty<'tcx>,
};
let predicates = cx.tcx.predicates_of(did);
+ let generics = (cx.tcx.generics_of(did), &predicates).clean(cx);
+ let decl = (did, sig).clean(cx);
+ let (all_types, ret_types) = clean::get_all_types(&generics, &decl, cx);
clean::Function {
- decl: (did, sig).clean(cx),
- generics: (cx.tcx.generics_of(did), &predicates).clean(cx),
+ decl,
+ generics,
header: hir::FnHeader {
unsafety: sig.unsafety(),
abi: sig.abi(),
constness,
asyncness: hir::IsAsync::NotAsync,
- }
+ },
+ all_types,
+ ret_types,
}
}
fn get_trait_type(&self) -> Option<Type> {
if let GenericBound::TraitBound(PolyTrait { ref trait_, .. }, _) = *self {
- return Some(trait_.clone());
+ Some(trait_.clone())
+ } else {
+ None
}
- None
}
}
EqPredicate { lhs: Type, rhs: Type },
}
+impl WherePredicate {
+ pub fn get_bounds(&self) -> Option<&[GenericBound]> {
+ match *self {
+ WherePredicate::BoundPredicate { ref bounds, .. } => Some(bounds),
+ WherePredicate::RegionPredicate { ref bounds, .. } => Some(bounds),
+ _ => None,
+ }
+ }
+}
+
impl Clean<WherePredicate> for hir::WherePredicate {
fn clean(&self, cx: &DocContext<'_>) -> WherePredicate {
match *self {
},
}
+impl GenericParamDefKind {
+ pub fn is_type(&self) -> bool {
+ match *self {
+ GenericParamDefKind::Type { .. } => true,
+ _ => false,
+ }
+ }
+
+ pub fn get_type(&self, cx: &DocContext<'_>) -> Option<Type> {
+ match *self {
+ GenericParamDefKind::Type { did, .. } => {
+ rustc_typeck::checked_type_of(cx.tcx, did, false).map(|t| t.clean(cx))
+ }
+ GenericParamDefKind::Const { ref ty, .. } => Some(ty.clone()),
+ GenericParamDefKind::Lifetime => None,
+ }
+ }
+}
+
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)]
pub struct GenericParamDef {
pub name: String,
pub fn is_synthetic_type_param(&self) -> bool {
match self.kind {
GenericParamDefKind::Lifetime |
- GenericParamDefKind::Const { .. } => {
- false
- }
+ GenericParamDefKind::Const { .. } => false,
GenericParamDefKind::Type { ref synthetic, .. } => synthetic.is_some(),
}
}
+
+ pub fn is_type(&self) -> bool {
+ self.kind.is_type()
+ }
+
+ pub fn get_type(&self, cx: &DocContext<'_>) -> Option<Type> {
+ self.kind.get_type(cx)
+ }
+
+ pub fn get_bounds(&self) -> Option<&[GenericBound]> {
+ match self.kind {
+ GenericParamDefKind::Type { ref bounds, .. } => Some(bounds),
+ _ => None,
+ }
+ }
}
impl Clean<GenericParamDef> for ty::GenericParamDef {
}
}
+/// The point of this function is to replace bounds with types.
+///
+/// i.e. `[T, U]` when you have the following bounds: `T: Display, U: Option<T>` will return
+/// `[Display, Option]` (we just return the list of types; we don't care about the
+/// wrapped types here).
+fn get_real_types(
+ generics: &Generics,
+ arg: &Type,
+ cx: &DocContext<'_>,
+) -> FxHashSet<Type> {
+ let arg_s = arg.to_string();
+ let mut res = FxHashSet::default();
+ if arg.is_full_generic() {
+ if let Some(where_pred) = generics.where_predicates.iter().find(|g| {
+ match g {
+ &WherePredicate::BoundPredicate { ref ty, .. } => ty.def_id() == arg.def_id(),
+ _ => false,
+ }
+ }) {
+ let bounds = where_pred.get_bounds().unwrap_or_else(|| &[]);
+ for bound in bounds.iter() {
+ match *bound {
+ GenericBound::TraitBound(ref poly_trait, _) => {
+ for x in poly_trait.generic_params.iter() {
+ if !x.is_type() {
+ continue
+ }
+ if let Some(ty) = x.get_type(cx) {
+ let adds = get_real_types(generics, &ty, cx);
+ if !adds.is_empty() {
+ res.extend(adds);
+ } else if !ty.is_full_generic() {
+ res.insert(ty);
+ }
+ }
+ }
+ }
+ _ => {}
+ }
+ }
+ }
+ if let Some(bound) = generics.params.iter().find(|g| {
+ g.is_type() && g.name == arg_s
+ }) {
+ for bound in bound.get_bounds().unwrap_or_else(|| &[]) {
+ if let Some(ty) = bound.get_trait_type() {
+ let adds = get_real_types(generics, &ty, cx);
+ if !adds.is_empty() {
+ res.extend(adds);
+ } else if !ty.is_full_generic() {
+ res.insert(ty.clone());
+ }
+ }
+ }
+ }
+ } else {
+ res.insert(arg.clone());
+ if let Some(gens) = arg.generics() {
+ for gen in gens.iter() {
+ if gen.is_full_generic() {
+ let adds = get_real_types(generics, gen, cx);
+ if !adds.is_empty() {
+ res.extend(adds);
+ }
+ } else {
+ res.insert(gen.clone());
+ }
+ }
+ }
+ }
+ res
+}
+
+/// Returns the full list of types when bounds have been resolved.
+///
+/// i.e. `fn foo<A: Display, B: Option<A>>(x: u32, y: B)` will return
+/// `[u32, Display, Option]`.
+pub fn get_all_types(
+ generics: &Generics,
+ decl: &FnDecl,
+ cx: &DocContext<'_>,
+) -> (Vec<Type>, Vec<Type>) {
+ let mut all_types = FxHashSet::default();
+ for arg in decl.inputs.values.iter() {
+ if arg.type_.is_self_type() {
+ continue;
+ }
+ let args = get_real_types(generics, &arg.type_, cx);
+ if !args.is_empty() {
+ all_types.extend(args);
+ } else {
+ all_types.insert(arg.type_.clone());
+ }
+ }
+
+ let ret_types = match decl.output {
+ FunctionRetTy::Return(ref return_type) => {
+ let mut ret = get_real_types(generics, &return_type, cx);
+ if ret.is_empty() {
+ ret.insert(return_type.clone());
+ }
+ ret.into_iter().collect()
+ }
+ _ => Vec::new(),
+ };
+ (all_types.into_iter().collect(), ret_types)
+}
+
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct Method {
pub generics: Generics,
pub decl: FnDecl,
pub header: hir::FnHeader,
pub defaultness: Option<hir::Defaultness>,
+ pub all_types: Vec<Type>,
+ pub ret_types: Vec<Type>,
}
impl<'a> Clean<Method> for (&'a hir::MethodSig, &'a hir::Generics, hir::BodyId,
let (generics, decl) = enter_impl_trait(cx, || {
(self.1.clean(cx), (&*self.0.decl, self.2).clean(cx))
});
+ let (all_types, ret_types) = get_all_types(&generics, &decl, cx);
Method {
decl,
generics,
header: self.0.header,
defaultness: self.3,
+ all_types,
+ ret_types,
}
}
}
pub header: hir::FnHeader,
pub decl: FnDecl,
pub generics: Generics,
+ pub all_types: Vec<Type>,
+ pub ret_types: Vec<Type>,
}
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub decl: FnDecl,
pub generics: Generics,
pub header: hir::FnHeader,
+ pub all_types: Vec<Type>,
+ pub ret_types: Vec<Type>,
}
impl Clean<Item> for doctree::Function {
} else {
hir::Constness::NotConst
};
+ let (all_types, ret_types) = get_all_types(&generics, &decl, cx);
Item {
name: Some(self.name.clean(cx)),
attrs: self.attrs.clean(cx),
decl,
generics,
header: hir::FnHeader { constness, ..self.header },
+ all_types,
+ ret_types,
}),
}
}
FnDecl {
inputs: (&self.0.inputs[..], self.1).clean(cx),
output: self.0.output.clean(cx),
- attrs: Attributes::default()
+ attrs: Attributes::default(),
}
}
}
let (generics, decl) = enter_impl_trait(cx, || {
(self.generics.clean(cx), (&*sig.decl, &names[..]).clean(cx))
});
+ let (all_types, ret_types) = get_all_types(&generics, &decl, cx);
TyMethodItem(TyMethod {
header: sig.header,
decl,
generics,
+ all_types,
+ ret_types,
})
}
hir::TraitItemKind::Type(ref bounds, ref default) => {
ty::ImplContainer(_) => true,
ty::TraitContainer(_) => self.defaultness.has_value()
};
+ let (all_types, ret_types) = get_all_types(&generics, &decl, cx);
if provided {
let constness = if cx.tcx.is_min_const_fn(self.def_id) {
hir::Constness::Const
asyncness: hir::IsAsync::NotAsync,
},
defaultness: Some(self.defaultness),
+ all_types,
+ ret_types,
})
} else {
TyMethodItem(TyMethod {
abi: sig.abi(),
constness: hir::Constness::NotConst,
asyncness: hir::IsAsync::NotAsync,
- }
+ },
+ all_types,
+ ret_types,
})
}
}
_ => None
}
}
+
+ pub fn is_full_generic(&self) -> bool {
+ match *self {
+ Type::Generic(_) => true,
+ _ => false,
+ }
+ }
}
impl GetDefId for Type {
let (generics, decl) = enter_impl_trait(cx, || {
(generics.clean(cx), (&**decl, &names[..]).clean(cx))
});
+ let (all_types, ret_types) = get_all_types(&generics, &decl, cx);
ForeignFunctionItem(Function {
decl,
generics,
constness: hir::Constness::NotConst,
asyncness: hir::IsAsync::NotAsync,
},
+ all_types,
+ ret_types,
})
}
hir::ForeignItemKind::Static(ref ty, mutbl) => {
}
Json::Array(data)
}
- None => Json::Null
+ None => Json::Null,
}
}
}
#[derive(Debug)]
struct IndexItemFunctionType {
inputs: Vec<Type>,
- output: Option<Type>,
+ output: Option<Vec<Type>>,
}
impl ToJson for IndexItemFunctionType {
fn to_json(&self) -> Json {
// If we couldn't figure out a type, just write `null`.
- if self.inputs.iter().chain(self.output.iter()).any(|ref i| i.name.is_none()) {
+ let mut iter = self.inputs.iter();
+ if match self.output {
+ Some(ref output) => iter.chain(output.iter()).any(|ref i| i.name.is_none()),
+ None => iter.any(|ref i| i.name.is_none()),
+ } {
Json::Null
} else {
let mut data = Vec::with_capacity(2);
data.push(self.inputs.to_json());
if let Some(ref output) = self.output {
- data.push(output.to_json());
+ if output.len() > 1 {
+ data.push(output.to_json());
+ } else {
+ data.push(output[0].to_json());
+ }
}
Json::Array(data)
}
}
fn get_index_search_type(item: &clean::Item) -> Option<IndexItemFunctionType> {
- let decl = match item.inner {
- clean::FunctionItem(ref f) => &f.decl,
- clean::MethodItem(ref m) => &m.decl,
- clean::TyMethodItem(ref m) => &m.decl,
- _ => return None
+ let (all_types, ret_types) = match item.inner {
+ clean::FunctionItem(ref f) => (&f.all_types, &f.ret_types),
+ clean::MethodItem(ref m) => (&m.all_types, &m.ret_types),
+ clean::TyMethodItem(ref m) => (&m.all_types, &m.ret_types),
+ _ => return None,
};
- let inputs = decl.inputs.values.iter().map(|arg| get_index_type(&arg.type_)).collect();
- let output = match decl.output {
- clean::FunctionRetTy::Return(ref return_type) => Some(get_index_type(return_type)),
- _ => None
+ let inputs = all_types.iter().map(|arg| {
+ get_index_type(&arg)
+ }).filter(|a| a.name.is_some()).collect();
+ let output = ret_types.iter().map(|arg| {
+ get_index_type(&arg)
+ }).filter(|a| a.name.is_some()).collect::<Vec<_>>();
+ let output = if output.is_empty() {
+ None
+ } else {
+ Some(output)
};
- Some(IndexItemFunctionType { inputs: inputs, output: output })
+ Some(IndexItemFunctionType { inputs, output })
}
fn get_index_type(clean_type: &clean::Type) -> Type {
}
lev_distance = Math.min(levenshtein(obj[NAME], val.name), lev_distance);
if (lev_distance <= MAX_LEV_DISTANCE) {
- lev_distance = Math.min(checkGenerics(obj, val), lev_distance);
+ // The generics didn't match but the name kinda did so we give it
+ // a levenshtein distance value that isn't *this* good so it goes
+ // into the search results but not too high.
+ lev_distance = Math.ceil((checkGenerics(obj, val) + lev_distance) / 2);
} else if (obj.length > GENERICS_DATA && obj[GENERICS_DATA].length > 0) {
// We can check if the type we're looking for is inside the generics!
var olength = obj[GENERICS_DATA].length;
var lev_distance = MAX_LEV_DISTANCE + 1;
if (obj && obj.type && obj.type.length > OUTPUT_DATA) {
- var tmp = checkType(obj.type[OUTPUT_DATA], val, literalSearch);
- if (literalSearch === true && tmp === true) {
- return true;
+ var ret = obj.type[OUTPUT_DATA];
+ if (!obj.type[OUTPUT_DATA].length) {
+ ret = [ret];
}
- lev_distance = Math.min(tmp, lev_distance);
- if (lev_distance === 0) {
- return 0;
+ for (var x = 0; x < ret.length; ++x) {
+ var r = ret[x];
+ if (typeof r === "string") {
+ r = [r];
+ }
+ var tmp = checkType(r, val, literalSearch);
+ if (literalSearch === true) {
+ if (tmp === true) {
+ return true;
+ }
+ continue;
+ }
+ lev_distance = Math.min(tmp, lev_distance);
+ if (lev_distance === 0) {
+ return 0;
+ }
}
}
return literalSearch === true ? false : lev_distance;
border-top: 2px solid;
}
-#titles > div:not(:last-child):not(.selected) {
+#titles > div:not(:last-child) {
margin-right: 1px;
width: calc(33.3% - 1px);
}
.stab.deprecated { background: #F3DFFF; border-color: #7F0087; color: #2f2f2f; }
.stab.portability { background: #C4ECFF; border-color: #7BA5DB; color: #2f2f2f; }
+.stab > code {
+ color: #ddd;
+}
+
#help > div {
background: #4d4d4d;
border-color: #bfbfbf;
.stab.deprecated { background: #F3DFFF; border-color: #7F0087; }
.stab.portability { background: #C4ECFF; border-color: #7BA5DB; }
+.stab > code {
+ color: #000;
+}
+
#help > div {
background: #e9e9e9;
border-color: #bfbfbf;
#[unstable(feature = "sgx_platform", issue = "56975")]
pub fn egetkey(request: &Align512<[u8; 512]>) -> Result<Align16<[u8; 16]>, u32> {
unsafe {
- let mut out = MaybeUninit::uninitialized();
+ let mut out = MaybeUninit::uninit();
let error;
asm!(
);
match error {
- 0 => Ok(out.into_initialized()),
+ 0 => Ok(out.assume_init()),
err => Err(err),
}
}
reportdata: &Align128<[u8; 64]>,
) -> Align512<[u8; 432]> {
unsafe {
- let mut report = MaybeUninit::uninitialized();
+ let mut report = MaybeUninit::uninit();
asm!(
"enclu"
"{rdx}"(report.as_mut_ptr())
);
- report.into_initialized()
+ report.assume_init()
}
}
let mut init = MaybeUninit::<RWLock>::zeroed();
rwlock_new(&mut init);
assert_eq!(
- mem::transmute::<_, [u8; 128]>(init.into_initialized()).as_slice(),
+ mem::transmute::<_, [u8; 128]>(init.assume_init()).as_slice(),
RWLOCK_INIT
)
};
impl ReentrantMutex {
pub fn uninitialized() -> ReentrantMutex {
- ReentrantMutex { inner: UnsafeCell::new(MaybeUninit::uninitialized()) }
+ ReentrantMutex { inner: UnsafeCell::new(MaybeUninit::uninit()) }
}
pub unsafe fn init(&mut self) {
p.fatal(&format!("expected ident, found {}", &token_str)).emit();
FatalError.raise()
}
- "path" => token::NtPath(panictry!(p.parse_path_common(PathStyle::Type, false))),
+ "path" => token::NtPath(panictry!(p.parse_path(PathStyle::Type))),
"meta" => token::NtMeta(panictry!(p.parse_meta_item())),
"vis" => token::NtVis(panictry!(p.parse_visibility(true))),
"lifetime" => if p.check_lifetime() {
if text.is_empty() {
self.span_bug(sp, "found empty literal suffix in Some")
}
- let msg = format!("{} with a suffix is invalid", kind);
- self.struct_span_err(sp, &msg)
- .span_label(sp, msg)
+ self.struct_span_err(sp, &format!("suffixes on {} are invalid", kind))
+ .span_label(sp, format!("invalid suffix `{}`", text))
.emit();
}
}
self.expect(&token::ModSep)?;
let mut path = ast::Path { segments: Vec::new(), span: syntax_pos::DUMMY_SP };
- self.parse_path_segments(&mut path.segments, T::PATH_STYLE, true)?;
+ self.parse_path_segments(&mut path.segments, T::PATH_STYLE)?;
path.span = ty_span.to(self.prev_span);
let ty_str = self.sess.source_map().span_to_snippet(ty_span)
if suffix_illegal {
let sp = self.span;
- self.expect_no_suffix(sp, lit.literal_name(), suf)
+ self.expect_no_suffix(sp, &format!("a {}", lit.literal_name()), suf)
}
result.unwrap()
self.expect(&token::ModSep)?;
let qself = QSelf { ty, path_span, position: path.segments.len() };
- self.parse_path_segments(&mut path.segments, style, true)?;
+ self.parse_path_segments(&mut path.segments, style)?;
Ok((qself, ast::Path { segments: path.segments, span: lo.to(self.prev_span) }))
}
/// `Fn(Args)` (without disambiguator)
/// `Fn::(Args)` (with disambiguator)
pub fn parse_path(&mut self, style: PathStyle) -> PResult<'a, ast::Path> {
- self.parse_path_common(style, true)
- }
-
- crate fn parse_path_common(&mut self, style: PathStyle, enable_warning: bool)
- -> PResult<'a, ast::Path> {
maybe_whole!(self, NtPath, |path| {
if style == PathStyle::Mod &&
path.segments.iter().any(|segment| segment.args.is_some()) {
if self.eat(&token::ModSep) {
segments.push(PathSegment::path_root(lo.shrink_to_lo().with_ctxt(mod_sep_ctxt)));
}
- self.parse_path_segments(&mut segments, style, enable_warning)?;
+ self.parse_path_segments(&mut segments, style)?;
Ok(ast::Path { segments, span: lo.to(self.prev_span) })
}
fn parse_path_segments(&mut self,
segments: &mut Vec<PathSegment>,
- style: PathStyle,
- enable_warning: bool)
+ style: PathStyle)
-> PResult<'a, ()> {
loop {
- let segment = self.parse_path_segment(style, enable_warning)?;
+ let segment = self.parse_path_segment(style)?;
if style == PathStyle::Expr {
// In order to check for trailing angle brackets, we must have finished
// recursing (`parse_path_segment` can indirectly call this function),
}
}
- fn parse_path_segment(&mut self, style: PathStyle, enable_warning: bool)
- -> PResult<'a, PathSegment> {
+ fn parse_path_segment(&mut self, style: PathStyle) -> PResult<'a, PathSegment> {
let ident = self.parse_path_segment_ident()?;
let is_args_start = |token: &token::Token| match *token {
Ok(if style == PathStyle::Type && check_args_start(self) ||
style != PathStyle::Mod && self.check(&token::ModSep)
&& self.look_ahead(1, |t| is_args_start(t)) {
- // Generic arguments are found - `<`, `(`, `::<` or `::(`.
- if self.eat(&token::ModSep) && style == PathStyle::Type && enable_warning {
- self.diagnostic().struct_span_warn(self.prev_span, "unnecessary path disambiguator")
- .span_label(self.prev_span, "try removing `::`").emit();
- }
- let lo = self.span;
-
// We use `style == PathStyle::Expr` to check if this is in a recursion or not. If
// it isn't, then we reset the unmatched angle bracket count as we're about to start
// parsing a new path.
self.max_angle_bracket_count = 0;
}
+ // Generic arguments are found - `<`, `(`, `::<` or `::(`.
+ self.eat(&token::ModSep);
+ let lo = self.span;
let args = if self.eat_lt() {
// `<'a, T, A = U>`
let (args, bindings) =
}
fn parse_field_name(&mut self) -> PResult<'a, Ident> {
- if let token::Literal(token::Integer(name), None) = self.token {
+ if let token::Literal(token::Integer(name), suffix) = self.token {
+ self.expect_no_suffix(self.span, "a tuple index", suffix);
self.bump();
Ok(Ident::new(name, self.prev_span))
} else {
// Assuming we have just parsed `.`, continue parsing into an expression.
fn parse_dot_suffix(&mut self, self_arg: P<Expr>, lo: Span) -> PResult<'a, P<Expr>> {
- let segment = self.parse_path_segment(PathStyle::Expr, true)?;
+ let segment = self.parse_path_segment(PathStyle::Expr)?;
self.check_trailing_angle_brackets(&segment, token::OpenDelim(token::Paren));
Ok(match self.token {
// expr.f
if self.eat(&token::Dot) {
match self.token {
- token::Ident(..) => {
- e = self.parse_dot_suffix(e, lo)?;
- }
- token::Literal(token::Integer(name), _) => {
- let span = self.span;
- self.bump();
- let field = ExprKind::Field(e, Ident::new(name, span));
- e = self.mk_expr(lo.to(span), field, ThinVec::new());
- }
- token::Literal(token::Float(n), _suf) => {
- self.bump();
- let fstr = n.as_str();
- let mut err = self.diagnostic()
- .struct_span_err(self.prev_span, &format!("unexpected token: `{}`", n));
- err.span_label(self.prev_span, "unexpected token");
- if fstr.chars().all(|x| "0123456789.".contains(x)) {
- let float = match fstr.parse::<f64>().ok() {
- Some(f) => f,
- None => continue,
- };
- let sugg = pprust::to_string(|s| {
- use crate::print::pprust::PrintState;
- s.popen()?;
- s.print_expr(&e)?;
- s.s.word( ".")?;
- s.print_usize(float.trunc() as usize)?;
- s.pclose()?;
- s.s.word(".")?;
- s.s.word(fstr.splitn(2, ".").last().unwrap().to_string())
- });
- err.span_suggestion(
- lo.to(self.prev_span),
- "try parenthesizing the first index",
- sugg,
- Applicability::MachineApplicable
- );
+ token::Ident(..) => {
+ e = self.parse_dot_suffix(e, lo)?;
}
- return Err(err);
+ token::Literal(token::Integer(name), suffix) => {
+ let span = self.span;
+ self.bump();
+ let field = ExprKind::Field(e, Ident::new(name, span));
+ e = self.mk_expr(lo.to(span), field, ThinVec::new());
+
+ self.expect_no_suffix(span, "a tuple index", suffix);
+ }
+ token::Literal(token::Float(n), _suf) => {
+ self.bump();
+ let fstr = n.as_str();
+ let mut err = self.diagnostic()
+ .struct_span_err(self.prev_span, &format!("unexpected token: `{}`", n));
+ err.span_label(self.prev_span, "unexpected token");
+ if fstr.chars().all(|x| "0123456789.".contains(x)) {
+ let float = match fstr.parse::<f64>().ok() {
+ Some(f) => f,
+ None => continue,
+ };
+ let sugg = pprust::to_string(|s| {
+ use crate::print::pprust::PrintState;
+ s.popen()?;
+ s.print_expr(&e)?;
+ s.s.word( ".")?;
+ s.print_usize(float.trunc() as usize)?;
+ s.pclose()?;
+ s.s.word(".")?;
+ s.s.word(fstr.splitn(2, ".").last().unwrap().to_string())
+ });
+ err.span_suggestion(
+ lo.to(self.prev_span),
+ "try parenthesizing the first index",
+ sugg,
+ Applicability::MachineApplicable
+ );
+ }
+ return Err(err);
- }
- _ => {
- // FIXME Could factor this out into non_fatal_unexpected or something.
- let actual = self.this_token_to_string();
- self.span_err(self.span, &format!("unexpected token: `{}`", actual));
- }
+ }
+ _ => {
+ // FIXME Could factor this out into non_fatal_unexpected or something.
+ let actual = self.this_token_to_string();
+ self.span_err(self.span, &format!("unexpected token: `{}`", actual));
+ }
}
continue;
}
match self.token {
token::Literal(token::Str_(s), suf) | token::Literal(token::StrRaw(s, _), suf) => {
let sp = self.span;
- self.expect_no_suffix(sp, "ABI spec", suf);
+ self.expect_no_suffix(sp, "an ABI spec", suf);
self.bump();
match abi::lookup(&s.as_str()) {
Some(abi) => Ok(Some(abi)),
match self.parse_optional_str() {
Some((s, style, suf)) => {
let sp = self.prev_span;
- self.expect_no_suffix(sp, "string literal", suf);
+ self.expect_no_suffix(sp, "a string literal", suf);
Ok((s, style))
}
_ => {
fn resolved_at(&mut self, span: Self::Span, at: Self::Span) -> Self::Span {
span.with_ctxt(at.ctxt())
}
+ fn source_text(&mut self, span: Self::Span) -> Option<String> {
+ self.sess.source_map().span_to_snippet(span).ok()
+ }
}
-Subproject commit 4fc9fb8245abe24680192535870c4522644a4212
+Subproject commit 1f484cbe0e863e9e215f1b3d7198063444d60873
# If this file is modified, then llvm will be (optionally) cleaned and then rebuilt.
# The actual contents of this file do not matter, but to trigger a change on the
# build bots then the contents should be changed so git updates the mtime.
-2019-01-16
+2019-03-18
// CHECK-NOT: alloca
// CHECK-NOT: memcpy
// CHECK-NOT: memset
- Box::new(MaybeUninit::uninitialized())
+ Box::new(MaybeUninit::uninit())
}
+
+// FIXME: add a test for a bigger box. Currently broken, see
+// https://github.com/rust-lang/rust/issues/58201.
}
pub fn main() {
- check!(P1::<u8, u8>, 1, 3);
- check!(P1::<u64, u16>, 1, 11);
+ check!(P1<u8, u8>, 1, 3);
+ check!(P1<u64, u16>, 1, 11);
- check!(P2::<u8, u8>, 1, 3);
- check!(P2::<u64, u16>, 2, 12);
+ check!(P2<u8, u8>, 1, 3);
+ check!(P2<u64, u16>, 2, 12);
- check!(P4C::<u8, u8>, 1, 3);
- check!(P4C::<u16, u64>, 4, 12);
+ check!(P4C<u8, u8>, 1, 3);
+ check!(P4C<u16, u64>, 4, 12);
}
+++ /dev/null
-warning: unnecessary path disambiguator
- --> $DIR/packed-struct-generic-size.rs:36:14
- |
-LL | check!(P1::<u8, u8>, 1, 3);
- | ^^ try removing `::`
-
-warning: unnecessary path disambiguator
- --> $DIR/packed-struct-generic-size.rs:37:14
- |
-LL | check!(P1::<u64, u16>, 1, 11);
- | ^^ try removing `::`
-
-warning: unnecessary path disambiguator
- --> $DIR/packed-struct-generic-size.rs:39:14
- |
-LL | check!(P2::<u8, u8>, 1, 3);
- | ^^ try removing `::`
-
-warning: unnecessary path disambiguator
- --> $DIR/packed-struct-generic-size.rs:40:14
- |
-LL | check!(P2::<u64, u16>, 2, 12);
- | ^^ try removing `::`
-
-warning: unnecessary path disambiguator
- --> $DIR/packed-struct-generic-size.rs:42:15
- |
-LL | check!(P4C::<u8, u8>, 1, 3);
- | ^^ try removing `::`
-
-warning: unnecessary path disambiguator
- --> $DIR/packed-struct-generic-size.rs:43:15
- |
-LL | check!(P4C::<u16, u64>, 4, 12);
- | ^^ try removing `::`
-
assert_eq!(
panic::catch_unwind(|| {
- mem::MaybeUninit::<!>::uninitialized().into_initialized()
+ mem::MaybeUninit::<!>::uninit().assume_init()
}).err().and_then(|a| a.downcast_ref::<String>().map(|s| {
s == "Attempted to instantiate uninhabited type !"
})),
assert_eq!(
panic::catch_unwind(|| {
- mem::MaybeUninit::<Foo>::uninitialized().into_initialized()
+ mem::MaybeUninit::<Foo>::uninit().assume_init()
}).err().and_then(|a| a.downcast_ref::<String>().map(|s| {
s == "Attempted to instantiate uninhabited type Foo"
})),
assert_eq!(
panic::catch_unwind(|| {
- mem::MaybeUninit::<Bar>::uninitialized().into_initialized()
+ mem::MaybeUninit::<Bar>::uninit().assume_init()
}).err().and_then(|a| a.downcast_ref::<String>().map(|s| {
s == "Attempted to instantiate uninhabited type Bar"
})),
"".parse().unwrap()
}
+
+#[proc_macro]
+pub fn macro_stringify(input: TokenStream) -> TokenStream {
+ let mut tokens = input.into_iter();
+ let first_span = tokens.next().expect("first token").span();
+ let last_span = tokens.last().map(|x| x.span()).unwrap_or(first_span);
+ let span = first_span.join(last_span).expect("joined span");
+ let src = span.source_text().expect("source_text");
+ TokenTree::Literal(Literal::string(&src)).into()
+}
+
// ignore-pretty
+#![feature(proc_macro_hygiene)]
+
#[macro_use]
extern crate span_test_macros;
extern crate span_api_tests;
-use span_api_tests::{reemit, assert_fake_source_file, assert_source_file};
+use span_api_tests::{reemit, assert_fake_source_file, assert_source_file, macro_stringify};
macro_rules! say_hello {
($macname:ident) => ( $macname! { "Hello, world!" })
assert_source_file! { "Hello, world!" }
}
-fn main() {}
+fn main() {
+ let s = macro_stringify!(Hello, world!);
+ assert_eq!(s, "Hello, world!");
+ assert_eq!(macro_stringify!(Hello, world!), "Hello, world!");
+ assert_eq!(reemit_legacy!(macro_stringify!(Hello, world!)), "Hello, world!");
+ reemit_legacy!(assert_eq!(macro_stringify!(Hello, world!), "Hello, world!"));
+ // reemit change the span to be that of the call site
+ assert_eq!(
+ reemit!(macro_stringify!(Hello, world!)),
+ "reemit!(macro_stringify!(Hello, world!))"
+ );
+ let r = "reemit!(assert_eq!(macro_stringify!(Hello, world!), r));";
+ reemit!(assert_eq!(macro_stringify!(Hello, world!), r));
+
+ assert_eq!(macro_stringify!(
+ Hello,
+ world!
+ ), "Hello,\n world!");
+
+ assert_eq!(macro_stringify!(Hello, /*world */ !), "Hello, /*world */ !");
+ assert_eq!(macro_stringify!(
+ Hello,
+ // comment
+ world!
+ ), "Hello,\n // comment\n world!");
+
+ assert_eq!(say_hello! { macro_stringify }, "\"Hello, world!\"");
+ assert_eq!(say_hello_extern! { macro_stringify }, "\"Hello, world!\"");
+}
struct S<T>(T);
fn f() {
- let f = Some(Foo { _a: 42 }).map(|a| a as Foo::<i32>); //~ WARN unnecessary path disambiguator
- let g: Foo::<i32> = Foo { _a: 42 }; //~ WARN unnecessary path disambiguator
+ let f = Some(Foo { _a: 42 }).map(|a| a as Foo::<i32>);
+ let g: Foo::<i32> = Foo { _a: 42 };
- m!(S::<u8>); // OK, no warning
+ m!(S::<u8>);
}
+++ /dev/null
-warning: unnecessary path disambiguator
- --> $DIR/issue-36116.rs:20:50
- |
-LL | let f = Some(Foo { _a: 42 }).map(|a| a as Foo::<i32>);
- | ^^ try removing `::`
-
-warning: unnecessary path disambiguator
- --> $DIR/issue-36116.rs:21:15
- |
-LL | let g: Foo::<i32> = Foo { _a: 42 };
- | ^^ try removing `::`
-
| ^^^^^^^^^^
error: denote infinite loops with `loop { ... }`
- --> $DIR/lint-impl-fn.rs:18:25
+ --> $DIR/lint-impl-fn.rs:27:5
|
-LL | fn foo(&self) { while true {} }
- | ^^^^^^^^^^ help: use `loop`
+LL | while true {}
+ | ^^^^^^^^^^ help: use `loop`
|
note: lint level defined here
- --> $DIR/lint-impl-fn.rs:13:8
+ --> $DIR/lint-impl-fn.rs:25:8
|
LL | #[deny(while_true)]
| ^^^^^^^^^^
error: denote infinite loops with `loop { ... }`
- --> $DIR/lint-impl-fn.rs:27:5
+ --> $DIR/lint-impl-fn.rs:18:25
|
-LL | while true {}
- | ^^^^^^^^^^ help: use `loop`
+LL | fn foo(&self) { while true {} }
+ | ^^^^^^^^^^ help: use `loop`
|
note: lint level defined here
- --> $DIR/lint-impl-fn.rs:25:8
+ --> $DIR/lint-impl-fn.rs:13:8
|
LL | #[deny(while_true)]
| ^^^^^^^^^^
|
= note: #[warn(no_mangle_generic_items)] on by default
+warning: denote infinite loops with `loop { ... }`
+ --> $DIR/suggestions.rs:46:5
+ |
+LL | while true {
+ | ^^^^^^^^^^ help: use `loop`
+ |
+ = note: #[warn(while_true)] on by default
+
+warning: the `warp_factor:` in this pattern is redundant
+ --> $DIR/suggestions.rs:61:23
+ |
+LL | Equinox { warp_factor: warp_factor } => {}
+ | ------------^^^^^^^^^^^^
+ | |
+ | help: remove this
+ |
+ = note: #[warn(non_shorthand_field_patterns)] on by default
+
error: const items should never be #[no_mangle]
--> $DIR/suggestions.rs:22:18
|
| |
| help: remove this attribute
-warning: denote infinite loops with `loop { ... }`
- --> $DIR/suggestions.rs:46:5
- |
-LL | while true {
- | ^^^^^^^^^^ help: use `loop`
- |
- = note: #[warn(while_true)] on by default
-
-warning: the `warp_factor:` in this pattern is redundant
- --> $DIR/suggestions.rs:61:23
- |
-LL | Equinox { warp_factor: warp_factor } => {}
- | ------------^^^^^^^^^^^^
- | |
- | help: remove this
- |
- = note: #[warn(non_shorthand_field_patterns)] on by default
-
error: aborting due to 3 previous errors
extern
- "C"suffix //~ ERROR ABI spec with a suffix is invalid
+ "C"suffix //~ ERROR suffixes on an ABI spec are invalid
fn foo() {}
extern
- "C"suffix //~ ERROR ABI spec with a suffix is invalid
+ "C"suffix //~ ERROR suffixes on an ABI spec are invalid
{}
fn main() {
- ""suffix; //~ ERROR string literal with a suffix is invalid
- b""suffix; //~ ERROR byte string literal with a suffix is invalid
- r#""#suffix; //~ ERROR string literal with a suffix is invalid
- br#""#suffix; //~ ERROR byte string literal with a suffix is invalid
- 'a'suffix; //~ ERROR char literal with a suffix is invalid
- b'a'suffix; //~ ERROR byte literal with a suffix is invalid
+ ""suffix; //~ ERROR suffixes on a string literal are invalid
+ b""suffix; //~ ERROR suffixes on a byte string literal are invalid
+ r#""#suffix; //~ ERROR suffixes on a string literal are invalid
+ br#""#suffix; //~ ERROR suffixes on a byte string literal are invalid
+ 'a'suffix; //~ ERROR suffixes on a char literal are invalid
+ b'a'suffix; //~ ERROR suffixes on a byte literal are invalid
1234u1024; //~ ERROR invalid width `1024` for integer literal
1234i1024; //~ ERROR invalid width `1024` for integer literal
-error: ABI spec with a suffix is invalid
+error: suffixes on an ABI spec are invalid
--> $DIR/bad-lit-suffixes.rs:5:5
|
LL | "C"suffix
- | ^^^^^^^^^ ABI spec with a suffix is invalid
+ | ^^^^^^^^^ invalid suffix `suffix`
-error: ABI spec with a suffix is invalid
+error: suffixes on an ABI spec are invalid
--> $DIR/bad-lit-suffixes.rs:9:5
|
LL | "C"suffix
- | ^^^^^^^^^ ABI spec with a suffix is invalid
+ | ^^^^^^^^^ invalid suffix `suffix`
-error: string literal with a suffix is invalid
+error: suffixes on a string literal are invalid
--> $DIR/bad-lit-suffixes.rs:13:5
|
LL | ""suffix;
- | ^^^^^^^^ string literal with a suffix is invalid
+ | ^^^^^^^^ invalid suffix `suffix`
-error: byte string literal with a suffix is invalid
+error: suffixes on a byte string literal are invalid
--> $DIR/bad-lit-suffixes.rs:14:5
|
LL | b""suffix;
- | ^^^^^^^^^ byte string literal with a suffix is invalid
+ | ^^^^^^^^^ invalid suffix `suffix`
-error: string literal with a suffix is invalid
+error: suffixes on a string literal are invalid
--> $DIR/bad-lit-suffixes.rs:15:5
|
LL | r#""#suffix;
- | ^^^^^^^^^^^ string literal with a suffix is invalid
+ | ^^^^^^^^^^^ invalid suffix `suffix`
-error: byte string literal with a suffix is invalid
+error: suffixes on a byte string literal are invalid
--> $DIR/bad-lit-suffixes.rs:16:5
|
LL | br#""#suffix;
- | ^^^^^^^^^^^^ byte string literal with a suffix is invalid
+ | ^^^^^^^^^^^^ invalid suffix `suffix`
-error: char literal with a suffix is invalid
+error: suffixes on a char literal are invalid
--> $DIR/bad-lit-suffixes.rs:17:5
|
LL | 'a'suffix;
- | ^^^^^^^^^ char literal with a suffix is invalid
+ | ^^^^^^^^^ invalid suffix `suffix`
-error: byte literal with a suffix is invalid
+error: suffixes on a byte literal are invalid
--> $DIR/bad-lit-suffixes.rs:18:5
|
LL | b'a'suffix;
- | ^^^^^^^^^^ byte literal with a suffix is invalid
+ | ^^^^^^^^^^ invalid suffix `suffix`
error: invalid width `1024` for integer literal
--> $DIR/bad-lit-suffixes.rs:20:5
--- /dev/null
+struct X(i32,i32,i32);
+
+fn main() {
+ let a = X(1, 2, 3);
+ let b = a.1suffix;
+ //~^ ERROR suffixes on a tuple index are invalid
+ println!("{}", b);
+ let c = (1, 2, 3);
+ let d = c.1suffix;
+ //~^ ERROR suffixes on a tuple index are invalid
+ println!("{}", d);
+ let s = X { 0suffix: 0, 1: 1, 2: 2 };
+ //~^ ERROR suffixes on a tuple index are invalid
+ match s {
+ X { 0suffix: _, .. } => {}
+ //~^ ERROR suffixes on a tuple index are invalid
+ }
+}
--- /dev/null
+error: suffixes on a tuple index are invalid
+ --> $DIR/issue-59418.rs:5:15
+ |
+LL | let b = a.1suffix;
+ | ^^^^^^^ invalid suffix `suffix`
+
+error: suffixes on a tuple index are invalid
+ --> $DIR/issue-59418.rs:9:15
+ |
+LL | let d = c.1suffix;
+ | ^^^^^^^ invalid suffix `suffix`
+
+error: suffixes on a tuple index are invalid
+ --> $DIR/issue-59418.rs:12:17
+ |
+LL | let s = X { 0suffix: 0, 1: 1, 2: 2 };
+ | ^^^^^^^ invalid suffix `suffix`
+
+error: suffixes on a tuple index are invalid
+ --> $DIR/issue-59418.rs:15:13
+ |
+LL | X { 0suffix: _, .. } => {}
+ | ^^^^^^^ invalid suffix `suffix`
+
+error: aborting due to 4 previous errors
+
--- /dev/null
+fn foo<'a, T>(_t: T) where T: Into<&'a str> {}
+
+fn main() {
+ foo(String::new());
+ //~^ ERROR the trait bound `&str: std::convert::From<std::string::String>` is not satisfied
+}
--- /dev/null
+error[E0277]: the trait bound `&str: std::convert::From<std::string::String>` is not satisfied
+ --> $DIR/into-str.rs:4:5
+ |
+LL | foo(String::new());
+ | ^^^ the trait `std::convert::From<std::string::String>` is not implemented for `&str`
+ |
+ = note: to coerce a `std::string::String` into a `&str`, use `&*` as a prefix
+ = note: required because of the requirements on the impl of `std::convert::Into<&str>` for `std::string::String`
+note: required by `foo`
+ --> $DIR/into-str.rs:1:1
+ |
+LL | fn foo<'a, T>(_t: T) where T: Into<&'a str> {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0277`.
--- /dev/null
+// The purpose of this text is to ensure that we get good
+// diagnostics when a `bool` is expected but that due to
+// an assignment expression `x = y` the type is `()`.
+
+fn main() {
+ let _: bool = 0 = 0; //~ ERROR mismatched types [E0308]
+
+ let _: bool = match 0 {
+ 0 => 0 = 0, //~ ERROR mismatched types [E0308]
+ _ => 0 = 0, //~ ERROR mismatched types [E0308]
+ };
+
+ let _: bool = match true {
+ true => 0 = 0, //~ ERROR mismatched types [E0308]
+ _ => (),
+ };
+
+ if 0 = 0 {} //~ ERROR mismatched types [E0308]
+
+ let _: bool = if { 0 = 0 } { //~ ERROR mismatched types [E0308]
+ 0 = 0 //~ ERROR mismatched types [E0308]
+ } else {
+ 0 = 0 //~ ERROR mismatched types [E0308]
+ };
+
+ let _ = (0 = 0) //~ ERROR mismatched types [E0308]
+ && { 0 = 0 } //~ ERROR mismatched types [E0308]
+ || (0 = 0); //~ ERROR mismatched types [E0308]
+
+ // A test to check that not expecting `bool` behaves well:
+ let _: usize = 0 = 0;
+ //~^ ERROR mismatched types [E0308]
+ //~| ERROR invalid left-hand side expression [E0070]
+}
--- /dev/null
+error[E0308]: mismatched types
+ --> $DIR/assignment-expected-bool.rs:6:19
+ |
+LL | let _: bool = 0 = 0;
+ | ^^^^^
+ | |
+ | expected bool, found ()
+ | help: try comparing for equality: `0 == 0`
+ |
+ = note: expected type `bool`
+ found type `()`
+
+error[E0308]: mismatched types
+ --> $DIR/assignment-expected-bool.rs:9:14
+ |
+LL | 0 => 0 = 0,
+ | ^^^^^
+ | |
+ | expected bool, found ()
+ | help: try comparing for equality: `0 == 0`
+ |
+ = note: expected type `bool`
+ found type `()`
+
+error[E0308]: mismatched types
+ --> $DIR/assignment-expected-bool.rs:10:14
+ |
+LL | _ => 0 = 0,
+ | ^^^^^
+ | |
+ | expected bool, found ()
+ | help: try comparing for equality: `0 == 0`
+ |
+ = note: expected type `bool`
+ found type `()`
+
+error[E0308]: mismatched types
+ --> $DIR/assignment-expected-bool.rs:14:17
+ |
+LL | true => 0 = 0,
+ | ^^^^^
+ | |
+ | expected bool, found ()
+ | help: try comparing for equality: `0 == 0`
+ |
+ = note: expected type `bool`
+ found type `()`
+
+error[E0308]: mismatched types
+ --> $DIR/assignment-expected-bool.rs:18:8
+ |
+LL | if 0 = 0 {}
+ | ^^^^^
+ | |
+ | expected bool, found ()
+ | help: try comparing for equality: `0 == 0`
+ |
+ = note: expected type `bool`
+ found type `()`
+
+error[E0308]: mismatched types
+ --> $DIR/assignment-expected-bool.rs:20:24
+ |
+LL | let _: bool = if { 0 = 0 } {
+ | ^^^^^
+ | |
+ | expected bool, found ()
+ | help: try comparing for equality: `0 == 0`
+ |
+ = note: expected type `bool`
+ found type `()`
+
+error[E0308]: mismatched types
+ --> $DIR/assignment-expected-bool.rs:21:9
+ |
+LL | 0 = 0
+ | ^^^^^
+ | |
+ | expected bool, found ()
+ | help: try comparing for equality: `0 == 0`
+ |
+ = note: expected type `bool`
+ found type `()`
+
+error[E0308]: mismatched types
+ --> $DIR/assignment-expected-bool.rs:23:9
+ |
+LL | 0 = 0
+ | ^^^^^
+ | |
+ | expected bool, found ()
+ | help: try comparing for equality: `0 == 0`
+ |
+ = note: expected type `bool`
+ found type `()`
+
+error[E0308]: mismatched types
+ --> $DIR/assignment-expected-bool.rs:26:13
+ |
+LL | let _ = (0 = 0)
+ | ^^^^^^^
+ | |
+ | expected bool, found ()
+ | help: try comparing for equality: `0 == 0`
+ |
+ = note: expected type `bool`
+ found type `()`
+
+error[E0308]: mismatched types
+ --> $DIR/assignment-expected-bool.rs:27:14
+ |
+LL | && { 0 = 0 }
+ | ^^^^^
+ | |
+ | expected bool, found ()
+ | help: try comparing for equality: `0 == 0`
+ |
+ = note: expected type `bool`
+ found type `()`
+
+error[E0308]: mismatched types
+ --> $DIR/assignment-expected-bool.rs:28:12
+ |
+LL | || (0 = 0);
+ | ^^^^^^^
+ | |
+ | expected bool, found ()
+ | help: try comparing for equality: `0 == 0`
+ |
+ = note: expected type `bool`
+ found type `()`
+
+error[E0070]: invalid left-hand side expression
+ --> $DIR/assignment-expected-bool.rs:31:20
+ |
+LL | let _: usize = 0 = 0;
+ | ^^^^^ left-hand of expression not valid
+
+error[E0308]: mismatched types
+ --> $DIR/assignment-expected-bool.rs:31:20
+ |
+LL | let _: usize = 0 = 0;
+ | ^^^^^ expected usize, found ()
+ |
+ = note: expected type `usize`
+ found type `()`
+
+error: aborting due to 13 previous errors
+
+Some errors occurred: E0070, E0308.
+For more information about an error, try `rustc --explain E0070`.
//~^ ERROR mismatched types
println!("{}", x);
}
- if (if true { x = 4 } else { x = 5 }) {
- //~^ ERROR mismatched types
+ if (
+ if true {
+ x = 4 //~ ERROR mismatched types
+ } else {
+ x = 5 //~ ERROR mismatched types
+ }
+ ) {
println!("{}", x);
}
}
found type `()`
error[E0308]: mismatched types
- --> $DIR/assignment-in-if.rs:34:8
+ --> $DIR/assignment-in-if.rs:36:13
|
-LL | if (if true { x = 4 } else { x = 5 }) {
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ expected bool, found ()
+LL | x = 4
+ | ^^^^^
+ | |
+ | expected bool, found ()
+ | help: try comparing for equality: `x == 4`
|
= note: expected type `bool`
found type `()`
-error: aborting due to 5 previous errors
+error[E0308]: mismatched types
+ --> $DIR/assignment-in-if.rs:38:13
+ |
+LL | x = 5
+ | ^^^^^
+ | |
+ | expected bool, found ()
+ | help: try comparing for equality: `x == 5`
+ |
+ = note: expected type `bool`
+ found type `()`
+
+error: aborting due to 6 previous errors
For more information about this error, try `rustc --explain E0308`.
}
}
+/// Configuration for compiletest
#[derive(Clone)]
pub struct Config {
/// `true` to to overwrite stderr/stdout files instead of complaining about changes in output.
pub linker: Option<String>,
pub llvm_components: String,
pub llvm_cxxflags: String,
+
+ /// Path to a NodeJS executable. Used for JS doctests, emscripten and WASM tests
pub nodejs: Option<String>,
}
pub normalize_stdout: Vec<(String, String)>,
pub normalize_stderr: Vec<(String, String)>,
pub failure_status: i32,
+ // Whether or not `rustfix` should apply the `CodeSuggestion`s of this test and compile the
+ // resulting Rust code.
pub run_rustfix: bool,
+ // If true, `rustfix` will only apply `MachineApplicable` suggestions.
pub rustfix_only_machine_applicable: bool,
pub assembly_output: Option<String>,
}
+//! These structs are a subset of the ones found in `syntax::json`.
+//! They are only used for deserialization of JSON output provided by libtest.
+
use crate::errors::{Error, ErrorKind};
use crate::runtest::ProcRes;
use serde_json;
use std::path::Path;
use std::str::FromStr;
-// These structs are a subset of the ones found in
-// `syntax::json`.
-
#[derive(Deserialize)]
struct Diagnostic {
message: String,
Ok(())
}
+
+/// Returns true if `file_name` looks like a proper test file name.
pub fn is_test(file_name: &OsString) -> bool {
let file_name = file_name.to_str().unwrap();
7012050: "GNU gdb (GDB) 7.12.50.20161027-git",
}
}
+
+#[test]
+fn is_test_test() {
+ assert_eq!(true, is_test(&OsString::from("a_test.rs")));
+ assert_eq!(false, is_test(&OsString::from(".a_test.rs")));
+ assert_eq!(false, is_test(&OsString::from("a_cat.gif")));
+ assert_eq!(false, is_test(&OsString::from("#a_dog_gif")));
+ assert_eq!(false, is_test(&OsString::from("~a_temp_file")));
+}