+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
[[package]]
name = "adler32"
version = "1.0.3"
-Subproject commit d663113d1d9fbd35f1145c29f6080a6350b7f419
+Subproject commit bd2778f304989ee52be8201504d6ec621dd60ca9
# Unstable features
Rustdoc is under active development, and like the Rust compiler, some features are only available
-on the nightly releases. Some of these are new and need some more testing before they're able to get
-released to the world at large, and some of them are tied to features in the Rust compiler that are
-themselves unstable. Several features here require a matching `#![feature(...)]` attribute to
+on nightly releases. Some of these features are new and need some more testing before they're able to be
+released to the world at large, and some of them are tied to features in the Rust compiler that are unstable. Several features here require a matching `#![feature(...)]` attribute to
enable, and thus are more fully documented in the [Unstable Book]. Those sections will link over
there as necessary.
This flag allows you to keep doctest executables around after they're compiled or run.
Usually, rustdoc will immediately discard a compiled doctest after it's been tested, but
-with this option, you can keep those binaries around for farther testing.
\ No newline at end of file
+with this option, you can keep those binaries around for further testing.
#[cfg(not(test))]
#[macro_export]
#[stable(feature = "rust1", since = "1.0.0")]
-#[allow_internal_unstable]
+#[cfg_attr(not(stage0), allow_internal_unstable(box_syntax))]
+#[cfg_attr(stage0, allow_internal_unstable)]
macro_rules! vec {
($elem:expr; $n:expr) => (
$crate::vec::from_elem($elem, $n)
authors = ["The Rust Project Developers"]
name = "arena"
version = "0.0.0"
+edition = "2018"
[lib]
name = "arena"
crate-type = ["dylib"]
[dependencies]
-rustc_data_structures = { path = "../librustc_data_structures" }
\ No newline at end of file
+rustc_data_structures = { path = "../librustc_data_structures" }
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/",
test(no_crate_inject, attr(deny(warnings))))]
+#![deny(rust_2018_idioms)]
+
#![feature(alloc)]
#![feature(core_intrinsics)]
#![feature(dropck_eyepatch)]
-#![feature(nll)]
#![feature(raw_vec_internals)]
#![cfg_attr(test, feature(test))]
#![allow(deprecated)]
extern crate alloc;
-extern crate rustc_data_structures;
use rustc_data_structures::sync::MTLock;
#[cfg(test)]
mod tests {
extern crate test;
- use self::test::Bencher;
+ use test::Bencher;
use super::TypedArena;
use std::cell::Cell;
impl<'a> Wrap<'a> {
fn alloc_inner<F: Fn() -> Inner>(&self, f: F) -> &Inner {
- let r: &EI = self.0.alloc(EI::I(f()));
+ let r: &EI<'_> = self.0.alloc(EI::I(f()));
if let &EI::I(ref i) = r {
i
} else {
panic!("mismatch");
}
}
- fn alloc_outer<F: Fn() -> Outer<'a>>(&self, f: F) -> &Outer {
- let r: &EI = self.0.alloc(EI::O(f()));
+ fn alloc_outer<F: Fn() -> Outer<'a>>(&self, f: F) -> &Outer<'_> {
+ let r: &EI<'_> = self.0.alloc(EI::O(f()));
if let &EI::O(ref o) = r {
o
} else {
count: &'a Cell<u32>,
}
- impl<'a> Drop for DropCounter<'a> {
+ impl Drop for DropCounter<'_> {
fn drop(&mut self) {
self.count.set(self.count.get() + 1);
}
fn test_typed_arena_drop_count() {
let counter = Cell::new(0);
{
- let arena: TypedArena<DropCounter> = TypedArena::default();
+ let arena: TypedArena<DropCounter<'_>> = TypedArena::default();
for _ in 0..100 {
// Allocate something with drop glue to make sure it doesn't leak.
arena.alloc(DropCounter { count: &counter });
#[test]
fn test_typed_arena_drop_on_clear() {
let counter = Cell::new(0);
- let mut arena: TypedArena<DropCounter> = TypedArena::default();
+ let mut arena: TypedArena<DropCounter<'_>> = TypedArena::default();
for i in 0..10 {
for _ in 0..100 {
// Allocate something with drop glue to make sure it doesn't leak.
//! [`TryFrom<T>`][`TryFrom`] rather than [`Into<U>`][`Into`] or [`TryInto<U>`][`TryInto`],
//! as [`From`] and [`TryFrom`] provide greater flexibility and offer
//! equivalent [`Into`] or [`TryInto`] implementations for free, thanks to a
-//! blanket implementation in the standard library.
+//! blanket implementation in the standard library. However, there are some cases
+//! where this is not possible, such as creating conversions into a type defined
+//! outside your library, so implementing [`Into`] instead of [`From`] is
+//! sometimes necessary.
//!
//! # Generic Implementations
//!
//! often generated by LLVM. Additionally, this library can make explicit
//! calls to these functions. Their signatures are the same as found in C.
//! These functions are often provided by the system libc, but can also be
-//! provided by the [rlibc crate](https://crates.io/crates/rlibc).
+//! provided by the [compiler-builtins crate](https://crates.io/crates/compiler_builtins).
//!
//! * `rust_begin_panic` - This function takes four arguments, a
//! `fmt::Arguments`, a `&'static str`, and two `u32`'s. These four arguments
/// Entry point of thread panic. For details, see `std::macros`.
#[macro_export]
-#[allow_internal_unstable]
+#[cfg_attr(not(stage0), allow_internal_unstable(core_panic, __rust_unstable_column))]
+#[cfg_attr(stage0, allow_internal_unstable)]
#[stable(feature = "core", since = "1.6.0")]
macro_rules! panic {
() => (
/// ```
#[macro_export]
#[stable(feature = "rust1", since = "1.0.0")]
-#[allow_internal_unstable]
+#[cfg_attr(stage0, allow_internal_unstable)]
+#[cfg_attr(not(stage0), allow_internal_unstable(format_args_nl))]
macro_rules! writeln {
($dst:expr) => (
write!($dst, "\n")
```"),
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
+ #[cfg(stage0)]
pub fn saturating_add(self, rhs: Self) -> Self {
- #[cfg(stage0)]
match self.checked_add(rhs) {
Some(x) => x,
None if rhs >= 0 => Self::max_value(),
None => Self::min_value(),
}
- #[cfg(not(stage0))]
- {
- intrinsics::saturating_add(self, rhs)
- }
+ }
+
+ }
+
+ doc_comment! {
+ concat!("Saturating integer addition. Computes `self + rhs`, saturating at the numeric
+bounds instead of overflowing.
+
+# Examples
+
+Basic usage:
+
+```
+", $Feature, "assert_eq!(100", stringify!($SelfT), ".saturating_add(1), 101);
+assert_eq!(", stringify!($SelfT), "::max_value().saturating_add(100), ", stringify!($SelfT),
+"::max_value());",
+$EndFeature, "
+```"),
+
+ #[stable(feature = "rust1", since = "1.0.0")]
+ #[rustc_const_unstable(feature = "const_saturating_int_methods")]
+ #[inline]
+ #[cfg(not(stage0))]
+ pub const fn saturating_add(self, rhs: Self) -> Self {
+ intrinsics::saturating_add(self, rhs)
}
}
```"),
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
+ #[cfg(stage0)]
pub fn saturating_sub(self, rhs: Self) -> Self {
- #[cfg(stage0)]
match self.checked_sub(rhs) {
Some(x) => x,
None if rhs >= 0 => Self::min_value(),
None => Self::max_value(),
}
- #[cfg(not(stage0))]
- {
- intrinsics::saturating_sub(self, rhs)
- }
+ }
+ }
+
+ doc_comment! {
+ concat!("Saturating integer subtraction. Computes `self - rhs`, saturating at the
+numeric bounds instead of overflowing.
+
+# Examples
+
+Basic usage:
+
+```
+", $Feature, "assert_eq!(100", stringify!($SelfT), ".saturating_sub(127), -27);
+assert_eq!(", stringify!($SelfT), "::min_value().saturating_sub(100), ", stringify!($SelfT),
+"::min_value());",
+$EndFeature, "
+```"),
+ #[stable(feature = "rust1", since = "1.0.0")]
+ #[rustc_const_unstable(feature = "const_saturating_int_methods")]
+ #[inline]
+ #[cfg(not(stage0))]
+ pub const fn saturating_sub(self, rhs: Self) -> Self {
+ intrinsics::saturating_sub(self, rhs)
}
}
```"),
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
+ #[cfg(stage0)]
pub fn saturating_add(self, rhs: Self) -> Self {
- #[cfg(stage0)]
match self.checked_add(rhs) {
Some(x) => x,
None => Self::max_value(),
}
- #[cfg(not(stage0))]
- {
- intrinsics::saturating_add(self, rhs)
- }
+ }
+ }
+
+ doc_comment! {
+ concat!("Saturating integer addition. Computes `self + rhs`, saturating at
+the numeric bounds instead of overflowing.
+
+# Examples
+
+Basic usage:
+
+```
+", $Feature, "assert_eq!(100", stringify!($SelfT), ".saturating_add(1), 101);
+assert_eq!(200u8.saturating_add(127), 255);", $EndFeature, "
+```"),
+
+ #[stable(feature = "rust1", since = "1.0.0")]
+ #[rustc_const_unstable(feature = "const_saturating_int_methods")]
+ #[inline]
+ #[cfg(not(stage0))]
+ pub const fn saturating_add(self, rhs: Self) -> Self {
+ intrinsics::saturating_add(self, rhs)
}
}
```"),
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
+ #[cfg(stage0)]
pub fn saturating_sub(self, rhs: Self) -> Self {
- #[cfg(stage0)]
match self.checked_sub(rhs) {
Some(x) => x,
None => Self::min_value(),
}
- #[cfg(not(stage0))]
- {
- intrinsics::saturating_sub(self, rhs)
- }
+ }
+ }
+
+ doc_comment! {
+ concat!("Saturating integer subtraction. Computes `self - rhs`, saturating
+at the numeric bounds instead of overflowing.
+
+# Examples
+
+Basic usage:
+
+```
+", $Feature, "assert_eq!(100", stringify!($SelfT), ".saturating_sub(27), 73);
+assert_eq!(13", stringify!($SelfT), ".saturating_sub(127), 0);", $EndFeature, "
+```"),
+ #[stable(feature = "rust1", since = "1.0.0")]
+ #[rustc_const_unstable(feature = "const_saturating_int_methods")]
+ #[inline]
+ #[cfg(not(stage0))]
+ pub const fn saturating_sub(self, rhs: Self) -> Self {
+ intrinsics::saturating_sub(self, rhs)
}
}
label="expected an `Fn<{Args}>` closure, found `{Self}`",
)]
#[fundamental] // so that regex can rely that `&str: !FnMut`
-#[must_use]
+#[must_use = "closures are lazy and do nothing unless called"]
pub trait Fn<Args> : FnMut<Args> {
/// Performs the call operation.
#[unstable(feature = "fn_traits", issue = "29625")]
label="expected an `FnMut<{Args}>` closure, found `{Self}`",
)]
#[fundamental] // so that regex can rely that `&str: !FnMut`
-#[must_use]
+#[must_use = "closures are lazy and do nothing unless called"]
pub trait FnMut<Args> : FnOnce<Args> {
/// Performs the call operation.
#[unstable(feature = "fn_traits", issue = "29625")]
label="expected an `FnOnce<{Args}>` closure, found `{Self}`",
)]
#[fundamental] // so that regex can rely that `&str: !FnMut`
-#[must_use]
+#[must_use = "closures are lazy and do nothing unless called"]
pub trait FnOnce<Args> {
/// The returned type after the call operator is used.
#[stable(feature = "fn_once_output", since = "1.12.0")]
#![deny(rust_2018_idioms)]
+#![feature(nll)]
#![feature(rustc_private)]
pub use Piece::*;
#![deny(rust_2018_idioms)]
+#![feature(nll)]
#![feature(str_escape)]
use LabelText::*;
#![feature(core_intrinsics)]
#![feature(libc)]
+#![feature(nll)]
#![feature(panic_runtime)]
#![feature(staged_api)]
#![feature(rustc_attrs)]
#![deny(rust_2018_idioms)]
+#![feature(nll)]
#![feature(staged_api)]
#![feature(const_fn)]
#![feature(extern_types)]
reason = "internal implementation detail of rustc right now",
issue = "0")]
#![allow(unused_features)]
+#![feature(nll)]
#![feature(staged_api)]
#![deny(rust_2018_idioms)]
use rustc_data_structures::fx::FxHashSet;
use rustc_data_structures::indexed_vec::IndexVec;
use rustc_data_structures::thin_vec::ThinVec;
+use rustc_data_structures::sync::Lrc;
use crate::session::Session;
use crate::session::config::nightly_options;
use crate::util::common::FN_OUTPUT_NAME;
Ident::with_empty_ctxt(Symbol::gensym(s))
}
- fn allow_internal_unstable(&self, reason: CompilerDesugaringKind, span: Span) -> Span {
+ /// Reuses the span but adds information like the kind of the desugaring and features that are
+ /// allowed inside this span.
+ fn mark_span_with_reason(
+ &self,
+ reason: CompilerDesugaringKind,
+ span: Span,
+ allow_internal_unstable: Option<Lrc<[Symbol]>>,
+ ) -> Span {
let mark = Mark::fresh(Mark::root());
mark.set_expn_info(source_map::ExpnInfo {
call_site: span,
def_site: Some(span),
format: source_map::CompilerDesugaring(reason),
- allow_internal_unstable: true,
+ allow_internal_unstable,
allow_internal_unsafe: false,
local_inner_macros: false,
edition: source_map::hygiene::default_edition(),
attrs: ThinVec::new(),
};
- let unstable_span = self.allow_internal_unstable(CompilerDesugaringKind::Async, span);
+ let unstable_span = self.mark_span_with_reason(
+ CompilerDesugaringKind::Async,
+ span,
+ Some(vec![
+ Symbol::intern("gen_future"),
+ ].into()),
+ );
let gen_future = self.expr_std_path(
unstable_span, &["future", "from_generator"], None, ThinVec::new());
hir::ExprKind::Call(P(gen_future), hir_vec![generator])
// desugaring that explicitly states that we don't want to track that.
// Not tracking it makes lints in rustc and clippy very fragile as
// frequently opened issues show.
- let exist_ty_span = self.allow_internal_unstable(
+ let exist_ty_span = self.mark_span_with_reason(
CompilerDesugaringKind::ExistentialReturnType,
span,
+ None,
);
let exist_ty_def_index = self
}),
ExprKind::TryBlock(ref body) => {
self.with_catch_scope(body.id, |this| {
- let unstable_span =
- this.allow_internal_unstable(CompilerDesugaringKind::TryBlock, body.span);
+ let unstable_span = this.mark_span_with_reason(
+ CompilerDesugaringKind::TryBlock,
+ body.span,
+ Some(vec![
+ Symbol::intern("try_trait"),
+ ].into()),
+ );
let mut block = this.lower_block(body, true).into_inner();
let tail = block.expr.take().map_or_else(
|| {
// expand <head>
let head = self.lower_expr(head);
let head_sp = head.span;
- let desugared_span = self.allow_internal_unstable(
+ let desugared_span = self.mark_span_with_reason(
CompilerDesugaringKind::ForLoop,
head_sp,
+ None,
);
let iter = self.str_to_ident("iter");
// return Try::from_error(From::from(err)),
// }
- let unstable_span =
- self.allow_internal_unstable(CompilerDesugaringKind::QuestionMark, e.span);
+ let unstable_span = self.mark_span_with_reason(
+ CompilerDesugaringKind::QuestionMark,
+ e.span,
+ Some(vec![
+ Symbol::intern("try_trait")
+ ].into()),
+ );
// `Try::into_result(<expr>)`
let discr = {
}
}
- pub fn expect_variant_data(&self, id: NodeId) -> &'hir VariantData {
+ pub fn expect_variant_data(&self, id: HirId) -> &'hir VariantData {
+ let id = self.hir_to_node_id(id); // FIXME(@ljedrz): remove when possible
+
match self.find(id) {
Some(Node::Item(i)) => {
match i.node {
}
}
- pub fn expect_variant(&self, id: NodeId) -> &'hir Variant {
+ pub fn expect_variant(&self, id: HirId) -> &'hir Variant {
+ let id = self.hir_to_node_id(id); // FIXME(@ljedrz): remove when possible
+
match self.find(id) {
Some(Node::Variant(variant)) => variant,
_ => bug!("expected variant, found {}", self.node_to_string(id)),
value.visit_with(&mut r)
}
- pub fn resolve_type_and_region_vars_if_possible<T>(&self, value: &T) -> T
- where
- T: TypeFoldable<'tcx>,
- {
- let mut r = resolve::OpportunisticTypeAndRegionResolver::new(self);
- value.fold_with(&mut r)
- }
-
pub fn fully_resolve<T: TypeFoldable<'tcx>>(&self, value: &T) -> FixupResult<T> {
/*!
* Attempts to resolve all type/region variables in
MacroExpandedMacroExportsAccessedByAbsolutePaths(Span),
ElidedLifetimesInPaths(usize, Span, bool, Span, String),
UnknownCrateTypes(Span, String, String),
+ UnusedImports(String, Vec<(Span, String)>),
}
impl BuiltinLintDiagnostics {
BuiltinLintDiagnostics::UnknownCrateTypes(span, note, sugg) => {
db.span_suggestion(span, ¬e, sugg, Applicability::MaybeIncorrect);
}
+ BuiltinLintDiagnostics::UnusedImports(message, replaces) => {
+ if !replaces.is_empty() {
+ db.multipart_suggestion(
+ &message,
+ replaces,
+ Applicability::MachineApplicable,
+ );
+ }
+ }
}
}
}
#[macro_export]
macro_rules! impl_stable_hash_for {
// Enums
- // FIXME(mark-i-m): Some of these should be `?` rather than `*`. See the git blame and change
- // them back when `?` is supported again.
(enum $enum_name:path {
$( $variant:ident
// this incorrectly allows specifying both tuple-like and struct-like fields, as in `Variant(a,b){c,d}`,
// when it should be only one or the other
- $( ( $($field:ident $(-> $delegate:tt)*),* ) )*
- $( { $($named_field:ident $(-> $named_delegate:tt)*),* } )*
- ),* $(,)*
+ $( ( $($field:ident $(-> $delegate:tt)?),* ) )?
+ $( { $($named_field:ident $(-> $named_delegate:tt)?),* } )?
+ ),* $(,)?
}) => {
impl_stable_hash_for!(
impl<> for enum $enum_name [ $enum_name ] { $( $variant
- $( ( $($field $(-> $delegate)*),* ) )*
- $( { $($named_field $(-> $named_delegate)*),* } )*
+ $( ( $($field $(-> $delegate)?),* ) )?
+ $( { $($named_field $(-> $named_delegate)?),* } )?
),* }
);
};
// We want to use the enum name both in the `impl ... for $enum_name` as well as for
// importing all the variants. Unfortunately it seems we have to take the name
// twice for this purpose
- (impl<$($lt:lifetime $(: $lt_bound:lifetime)* ),* $(,)* $($T:ident),* $(,)*>
+ (impl<$($lt:lifetime $(: $lt_bound:lifetime)? ),* $(,)? $($T:ident),* $(,)?>
for enum $enum_name:path
[ $enum_path:path ]
{
$( $variant:ident
// this incorrectly allows specifying both tuple-like and struct-like fields, as in `Variant(a,b){c,d}`,
// when it should be only one or the other
- $( ( $($field:ident $(-> $delegate:tt)*),* ) )*
- $( { $($named_field:ident $(-> $named_delegate:tt)*),* } )*
- ),* $(,)*
+ $( ( $($field:ident $(-> $delegate:tt)?),* ) )?
+ $( { $($named_field:ident $(-> $named_delegate:tt)?),* } )?
+ ),* $(,)?
}) => {
- impl<'a, $($lt $(: $lt_bound)*,)* $($T,)*>
+ impl<'a, $($lt $(: $lt_bound)?,)* $($T,)*>
::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'a>>
for $enum_name
where $($T: ::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'a>>),*
match *self {
$(
- $variant $( ( $(ref $field),* ) )* $( { $(ref $named_field),* } )* => {
- $($( __impl_stable_hash_field!($field, __ctx, __hasher $(, $delegate)*) );*)*
- $($( __impl_stable_hash_field!($named_field, __ctx, __hasher $(, $named_delegate)*) );*)*
+ $variant $( ( $(ref $field),* ) )? $( { $(ref $named_field),* } )? => {
+ $($( __impl_stable_hash_field!($field, __ctx, __hasher $(, $delegate)?) );*)?
+ $($( __impl_stable_hash_field!($named_field, __ctx, __hasher $(, $named_delegate)?) );*)?
}
)*
}
}
};
// Structs
- // FIXME(mark-i-m): same here.
- (struct $struct_name:path { $($field:ident $(-> $delegate:tt)*),* $(,)* }) => {
+ (struct $struct_name:path { $($field:ident $(-> $delegate:tt)?),* $(,)? }) => {
impl_stable_hash_for!(
- impl<'tcx> for struct $struct_name { $($field $(-> $delegate)*),* }
+ impl<'tcx> for struct $struct_name { $($field $(-> $delegate)?),* }
);
};
- (impl<$($lt:lifetime $(: $lt_bound:lifetime)* ),* $(,)* $($T:ident),* $(,)*> for struct $struct_name:path {
- $($field:ident $(-> $delegate:tt)*),* $(,)*
+ (impl<$($lt:lifetime $(: $lt_bound:lifetime)? ),* $(,)? $($T:ident),* $(,)?> for struct $struct_name:path {
+ $($field:ident $(-> $delegate:tt)?),* $(,)?
}) => {
- impl<'a, $($lt $(: $lt_bound)*,)* $($T,)*>
+ impl<'a, $($lt $(: $lt_bound)?,)* $($T,)*>
::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'a>> for $struct_name
where $($T: ::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'a>>),*
{
$(ref $field),*
} = *self;
- $( __impl_stable_hash_field!($field, __ctx, __hasher $(, $delegate)*) );*
+ $( __impl_stable_hash_field!($field, __ctx, __hasher $(, $delegate)?) );*
}
}
};
// Tuple structs
- // We cannot use normale parentheses here, the parser won't allow it
- // FIXME(mark-i-m): same here.
- (tuple_struct $struct_name:path { $($field:ident $(-> $delegate:tt)*),* $(,)* }) => {
+ // We cannot use normal parentheses here, the parser won't allow it
+ (tuple_struct $struct_name:path { $($field:ident $(-> $delegate:tt)?),* $(,)? }) => {
impl_stable_hash_for!(
- impl<'tcx> for tuple_struct $struct_name { $($field $(-> $delegate)*),* }
+ impl<'tcx> for tuple_struct $struct_name { $($field $(-> $delegate)?),* }
);
};
- (impl<$($lt:lifetime $(: $lt_bound:lifetime)* ),* $(,)* $($T:ident),* $(,)*>
- for tuple_struct $struct_name:path { $($field:ident $(-> $delegate:tt)*),* $(,)* }) => {
- impl<'a, $($lt $(: $lt_bound)*,)* $($T,)*>
+ (impl<$($lt:lifetime $(: $lt_bound:lifetime)? ),* $(,)? $($T:ident),* $(,)?>
+ for tuple_struct $struct_name:path { $($field:ident $(-> $delegate:tt)?),* $(,)? }) => {
+ impl<'a, $($lt $(: $lt_bound)?,)* $($T,)*>
::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'a>> for $struct_name
where $($T: ::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'a>>),*
{
$(ref $field),*
) = *self;
- $( __impl_stable_hash_field!($field, __ctx, __hasher $(, $delegate)*) );*
+ $( __impl_stable_hash_field!($field, __ctx, __hasher $(, $delegate)?) );*
}
}
};
/// deprecated. If the item is indeed deprecated, we will emit a deprecation lint attached to
/// `id`.
pub fn eval_stability(self, def_id: DefId, id: Option<NodeId>, span: Span) -> EvalResult {
- if span.allows_unstable() {
- debug!("stability: skipping span={:?} since it is internal", span);
- return EvalResult::Allow;
- }
-
let lint_deprecated = |def_id: DefId,
id: NodeId,
note: Option<Symbol>,
match stability {
Some(&Stability { level: attr::Unstable { reason, issue }, feature, .. }) => {
+ if span.allows_unstable(&feature.as_str()) {
+ debug!("stability: skipping span={:?} since it is internal", span);
+ return EvalResult::Allow;
+ }
if self.stability().active_features.contains(&feature) {
return EvalResult::Allow;
}
use std::fmt;
use crate::mir;
use crate::hir::def_id::DefId;
-use crate::ty::{self, TyCtxt, Instance};
+use crate::ty::{self, TyCtxt, Instance, subst::UnpackedKind};
use crate::ty::layout::{self, Size};
use std::io;
use crate::rustc_serialize::{Encoder, Decodable, Encodable};
id
}
- /// Functions cannot be identified by pointers, as asm-equal functions can get deduplicated
- /// by the linker and functions can be duplicated across crates.
- /// We thus generate a new `AllocId` for every mention of a function. This means that
- /// `main as fn() == main as fn()` is false, while `let x = main as fn(); x == x` is true.
pub fn create_fn_alloc(&mut self, instance: Instance<'tcx>) -> AllocId {
- let id = self.reserve();
- self.id_to_kind.insert(id, AllocKind::Function(instance));
- id
+ // Functions cannot be identified by pointers, as asm-equal functions can get deduplicated
+ // by the linker (we set the "unnamed_addr" attribute for LLVM) and functions can be
+ // duplicated across crates.
+ // We thus generate a new `AllocId` for every mention of a function. This means that
+ // `main as fn() == main as fn()` is false, while `let x = main as fn(); x == x` is true.
+ // However, formatting code relies on function identity (see #58320), so we only do
+ // this for generic functions. Lifetime parameters are ignored.
+ let is_generic = instance.substs.into_iter().any(|kind| {
+ match kind.unpack() {
+ UnpackedKind::Lifetime(_) => false,
+ _ => true,
+ }
+ });
+ if is_generic {
+ // Get a fresh ID
+ let id = self.reserve();
+ self.id_to_kind.insert(id, AllocKind::Function(instance));
+ id
+ } else {
+ // Deduplicate
+ self.intern(AllocKind::Function(instance))
+ }
}
/// Returns `None` in case the `AllocId` is dangling. An `EvalContext` can still have a
// ```rust
// fn super_basic_block_data(&mut self,
// block: BasicBlock,
-// data: & $($mutability)* BasicBlockData<'tcx>) {
+// data: & $($mutability)? BasicBlockData<'tcx>) {
// let BasicBlockData {
-// ref $($mutability)* statements,
-// ref $($mutability)* terminator,
+// statements,
+// terminator,
// is_cleanup: _
// } = *data;
//
// `is_cleanup` above.
macro_rules! make_mir_visitor {
- ($visitor_trait_name:ident, $($mutability:ident)*) => {
+ ($visitor_trait_name:ident, $($mutability:ident)?) => {
pub trait $visitor_trait_name<'tcx> {
// Override these, and call `self.super_xxx` to revert back to the
// default behavior.
- fn visit_mir(&mut self, mir: & $($mutability)* Mir<'tcx>) {
+ fn visit_mir(&mut self, mir: & $($mutability)? Mir<'tcx>) {
self.super_mir(mir);
}
fn visit_basic_block_data(&mut self,
block: BasicBlock,
- data: & $($mutability)* BasicBlockData<'tcx>) {
+ data: & $($mutability)? BasicBlockData<'tcx>) {
self.super_basic_block_data(block, data);
}
fn visit_source_scope_data(&mut self,
- scope_data: & $($mutability)* SourceScopeData) {
+ scope_data: & $($mutability)? SourceScopeData) {
self.super_source_scope_data(scope_data);
}
fn visit_statement(&mut self,
block: BasicBlock,
- statement: & $($mutability)* Statement<'tcx>,
+ statement: & $($mutability)? Statement<'tcx>,
location: Location) {
self.super_statement(block, statement, location);
}
fn visit_assign(&mut self,
block: BasicBlock,
- place: & $($mutability)* Place<'tcx>,
- rvalue: & $($mutability)* Rvalue<'tcx>,
+ place: & $($mutability)? Place<'tcx>,
+ rvalue: & $($mutability)? Rvalue<'tcx>,
location: Location) {
self.super_assign(block, place, rvalue, location);
}
fn visit_terminator(&mut self,
block: BasicBlock,
- terminator: & $($mutability)* Terminator<'tcx>,
+ terminator: & $($mutability)? Terminator<'tcx>,
location: Location) {
self.super_terminator(block, terminator, location);
}
fn visit_terminator_kind(&mut self,
block: BasicBlock,
- kind: & $($mutability)* TerminatorKind<'tcx>,
+ kind: & $($mutability)? TerminatorKind<'tcx>,
location: Location) {
self.super_terminator_kind(block, kind, location);
}
fn visit_assert_message(&mut self,
- msg: & $($mutability)* AssertMessage<'tcx>,
+ msg: & $($mutability)? AssertMessage<'tcx>,
location: Location) {
self.super_assert_message(msg, location);
}
fn visit_rvalue(&mut self,
- rvalue: & $($mutability)* Rvalue<'tcx>,
+ rvalue: & $($mutability)? Rvalue<'tcx>,
location: Location) {
self.super_rvalue(rvalue, location);
}
fn visit_operand(&mut self,
- operand: & $($mutability)* Operand<'tcx>,
+ operand: & $($mutability)? Operand<'tcx>,
location: Location) {
self.super_operand(operand, location);
}
fn visit_ascribe_user_ty(&mut self,
- place: & $($mutability)* Place<'tcx>,
- variance: & $($mutability)* ty::Variance,
- user_ty: & $($mutability)* UserTypeProjection<'tcx>,
+ place: & $($mutability)? Place<'tcx>,
+ variance: & $($mutability)? ty::Variance,
+ user_ty: & $($mutability)? UserTypeProjection<'tcx>,
location: Location) {
self.super_ascribe_user_ty(place, variance, user_ty, location);
}
fn visit_retag(&mut self,
- kind: & $($mutability)* RetagKind,
- place: & $($mutability)* Place<'tcx>,
+ kind: & $($mutability)? RetagKind,
+ place: & $($mutability)? Place<'tcx>,
location: Location) {
self.super_retag(kind, place, location);
}
fn visit_place(&mut self,
- place: & $($mutability)* Place<'tcx>,
+ place: & $($mutability)? Place<'tcx>,
context: PlaceContext<'tcx>,
location: Location) {
self.super_place(place, context, location);
}
fn visit_static(&mut self,
- static_: & $($mutability)* Static<'tcx>,
+ static_: & $($mutability)? Static<'tcx>,
context: PlaceContext<'tcx>,
location: Location) {
self.super_static(static_, context, location);
}
fn visit_projection(&mut self,
- place: & $($mutability)* PlaceProjection<'tcx>,
+ place: & $($mutability)? PlaceProjection<'tcx>,
context: PlaceContext<'tcx>,
location: Location) {
self.super_projection(place, context, location);
}
fn visit_projection_elem(&mut self,
- place: & $($mutability)* PlaceElem<'tcx>,
+ place: & $($mutability)? PlaceElem<'tcx>,
location: Location) {
self.super_projection_elem(place, location);
}
}
fn visit_constant(&mut self,
- constant: & $($mutability)* Constant<'tcx>,
+ constant: & $($mutability)? Constant<'tcx>,
location: Location) {
self.super_constant(constant, location);
}
fn visit_def_id(&mut self,
- def_id: & $($mutability)* DefId,
+ def_id: & $($mutability)? DefId,
_: Location) {
self.super_def_id(def_id);
}
fn visit_span(&mut self,
- span: & $($mutability)* Span) {
+ span: & $($mutability)? Span) {
self.super_span(span);
}
fn visit_source_info(&mut self,
- source_info: & $($mutability)* SourceInfo) {
+ source_info: & $($mutability)? SourceInfo) {
self.super_source_info(source_info);
}
fn visit_ty(&mut self,
- ty: & $($mutability)* Ty<'tcx>,
+ ty: & $($mutability)? Ty<'tcx>,
_: TyContext) {
self.super_ty(ty);
}
fn visit_user_type_projection(
&mut self,
- ty: & $($mutability)* UserTypeProjection<'tcx>,
+ ty: & $($mutability)? UserTypeProjection<'tcx>,
) {
self.super_user_type_projection(ty);
}
fn visit_user_type_annotation(
&mut self,
index: UserTypeAnnotationIndex,
- ty: & $($mutability)* CanonicalUserTypeAnnotation<'tcx>,
+ ty: & $($mutability)? CanonicalUserTypeAnnotation<'tcx>,
) {
self.super_user_type_annotation(index, ty);
}
fn visit_region(&mut self,
- region: & $($mutability)* ty::Region<'tcx>,
+ region: & $($mutability)? ty::Region<'tcx>,
_: Location) {
self.super_region(region);
}
fn visit_const(&mut self,
- constant: & $($mutability)* &'tcx ty::LazyConst<'tcx>,
+ constant: & $($mutability)? &'tcx ty::LazyConst<'tcx>,
_: Location) {
self.super_const(constant);
}
fn visit_substs(&mut self,
- substs: & $($mutability)* &'tcx Substs<'tcx>,
+ substs: & $($mutability)? &'tcx Substs<'tcx>,
_: Location) {
self.super_substs(substs);
}
fn visit_closure_substs(&mut self,
- substs: & $($mutability)* ClosureSubsts<'tcx>,
+ substs: & $($mutability)? ClosureSubsts<'tcx>,
_: Location) {
self.super_closure_substs(substs);
}
fn visit_generator_substs(&mut self,
- substs: & $($mutability)* GeneratorSubsts<'tcx>,
+ substs: & $($mutability)? GeneratorSubsts<'tcx>,
_: Location) {
self.super_generator_substs(substs);
}
fn visit_local_decl(&mut self,
local: Local,
- local_decl: & $($mutability)* LocalDecl<'tcx>) {
+ local_decl: & $($mutability)? LocalDecl<'tcx>) {
self.super_local_decl(local, local_decl);
}
fn visit_local(&mut self,
- _local: & $($mutability)* Local,
+ _local: & $($mutability)? Local,
_context: PlaceContext<'tcx>,
_location: Location) {
}
fn visit_source_scope(&mut self,
- scope: & $($mutability)* SourceScope) {
+ scope: & $($mutability)? SourceScope) {
self.super_source_scope(scope);
}
// not meant to be overridden.
fn super_mir(&mut self,
- mir: & $($mutability)* Mir<'tcx>) {
- if let Some(yield_ty) = &$($mutability)* mir.yield_ty {
+ mir: & $($mutability)? Mir<'tcx>) {
+ if let Some(yield_ty) = &$($mutability)? mir.yield_ty {
self.visit_ty(yield_ty, TyContext::YieldTy(SourceInfo {
span: mir.span,
scope: OUTERMOST_SOURCE_SCOPE,
(mut) => (mir.basic_blocks_mut().iter_enumerated_mut());
() => (mir.basic_blocks().iter_enumerated());
};
- for (bb, data) in basic_blocks!($($mutability)*) {
+ for (bb, data) in basic_blocks!($($mutability)?) {
self.visit_basic_block_data(bb, data);
}
- for scope in &$($mutability)* mir.source_scopes {
+ for scope in &$($mutability)? mir.source_scopes {
self.visit_source_scope_data(scope);
}
- self.visit_ty(&$($mutability)* mir.return_ty(), TyContext::ReturnTy(SourceInfo {
+ self.visit_ty(&$($mutability)? mir.return_ty(), TyContext::ReturnTy(SourceInfo {
span: mir.span,
scope: OUTERMOST_SOURCE_SCOPE,
}));
for local in mir.local_decls.indices() {
- self.visit_local_decl(local, & $($mutability)* mir.local_decls[local]);
+ self.visit_local_decl(local, & $($mutability)? mir.local_decls[local]);
}
macro_rules! type_annotations {
() => (mir.user_type_annotations.iter_enumerated());
};
- for (index, annotation) in type_annotations!($($mutability)*) {
+ for (index, annotation) in type_annotations!($($mutability)?) {
self.visit_user_type_annotation(
index, annotation
);
}
- self.visit_span(&$($mutability)* mir.span);
+ self.visit_span(&$($mutability)? mir.span);
}
fn super_basic_block_data(&mut self,
block: BasicBlock,
- data: & $($mutability)* BasicBlockData<'tcx>) {
+ data: & $($mutability)? BasicBlockData<'tcx>) {
let BasicBlockData {
- ref $($mutability)* statements,
- ref $($mutability)* terminator,
+ statements,
+ terminator,
is_cleanup: _
- } = *data;
+ } = data;
let mut index = 0;
for statement in statements {
index += 1;
}
- if let Some(ref $($mutability)* terminator) = *terminator {
+ if let Some(terminator) = terminator {
let location = Location { block: block, statement_index: index };
self.visit_terminator(block, terminator, location);
}
}
- fn super_source_scope_data(&mut self,
- scope_data: & $($mutability)* SourceScopeData) {
+ fn super_source_scope_data(&mut self, scope_data: & $($mutability)? SourceScopeData) {
let SourceScopeData {
- ref $($mutability)* span,
- ref $($mutability)* parent_scope,
- } = *scope_data;
+ span,
+ parent_scope,
+ } = scope_data;
self.visit_span(span);
- if let Some(ref $($mutability)* parent_scope) = *parent_scope {
+ if let Some(parent_scope) = parent_scope {
self.visit_source_scope(parent_scope);
}
}
fn super_statement(&mut self,
block: BasicBlock,
- statement: & $($mutability)* Statement<'tcx>,
+ statement: & $($mutability)? Statement<'tcx>,
location: Location) {
let Statement {
- ref $($mutability)* source_info,
- ref $($mutability)* kind,
- } = *statement;
+ source_info,
+ kind,
+ } = statement;
self.visit_source_info(source_info);
- match *kind {
- StatementKind::Assign(ref $($mutability)* place,
- ref $($mutability)* rvalue) => {
+ match kind {
+ StatementKind::Assign(place, rvalue) => {
self.visit_assign(block, place, rvalue, location);
}
- StatementKind::FakeRead(_, ref $($mutability)* place) => {
+ StatementKind::FakeRead(_, place) => {
self.visit_place(
place,
PlaceContext::NonMutatingUse(NonMutatingUseContext::Inspect),
location
);
}
- StatementKind::SetDiscriminant{ ref $($mutability)* place, .. } => {
+ StatementKind::SetDiscriminant { place, .. } => {
self.visit_place(
place,
PlaceContext::MutatingUse(MutatingUseContext::Store),
location
);
}
- StatementKind::StorageLive(ref $($mutability)* local) => {
+ StatementKind::StorageLive(local) => {
self.visit_local(
local,
PlaceContext::NonUse(NonUseContext::StorageLive),
location
);
}
- StatementKind::StorageDead(ref $($mutability)* local) => {
+ StatementKind::StorageDead(local) => {
self.visit_local(
local,
PlaceContext::NonUse(NonUseContext::StorageDead),
location
);
}
- StatementKind::InlineAsm { ref $($mutability)* outputs,
- ref $($mutability)* inputs,
- asm: _ } => {
- for output in & $($mutability)* outputs[..] {
+ StatementKind::InlineAsm { outputs, inputs, asm: _ } => {
+ for output in & $($mutability)? outputs[..] {
self.visit_place(
output,
PlaceContext::MutatingUse(MutatingUseContext::AsmOutput),
location
);
}
- for (span, input) in & $($mutability)* inputs[..] {
+ for (span, input) in & $($mutability)? inputs[..] {
self.visit_span(span);
self.visit_operand(input, location);
}
}
- StatementKind::Retag ( ref $($mutability)* kind,
- ref $($mutability)* place ) => {
+ StatementKind::Retag(kind, place) => {
self.visit_retag(kind, place, location);
}
- StatementKind::AscribeUserType(
- ref $($mutability)* place,
- ref $($mutability)* variance,
- ref $($mutability)* user_ty,
- ) => {
+ StatementKind::AscribeUserType(place, variance, user_ty) => {
self.visit_ascribe_user_ty(place, variance, user_ty, location);
}
StatementKind::Nop => {}
fn super_assign(&mut self,
_block: BasicBlock,
- place: &$($mutability)* Place<'tcx>,
- rvalue: &$($mutability)* Rvalue<'tcx>,
+ place: &$($mutability)? Place<'tcx>,
+ rvalue: &$($mutability)? Rvalue<'tcx>,
location: Location) {
self.visit_place(
place,
fn super_terminator(&mut self,
block: BasicBlock,
- terminator: &$($mutability)* Terminator<'tcx>,
+ terminator: &$($mutability)? Terminator<'tcx>,
location: Location) {
- let Terminator {
- ref $($mutability)* source_info,
- ref $($mutability)* kind,
- } = *terminator;
+ let Terminator { source_info, kind } = terminator;
self.visit_source_info(source_info);
self.visit_terminator_kind(block, kind, location);
fn super_terminator_kind(&mut self,
block: BasicBlock,
- kind: & $($mutability)* TerminatorKind<'tcx>,
+ kind: & $($mutability)? TerminatorKind<'tcx>,
source_location: Location) {
- match *kind {
+ match kind {
TerminatorKind::Goto { target } => {
- self.visit_branch(block, target);
+ self.visit_branch(block, *target);
}
- TerminatorKind::SwitchInt { ref $($mutability)* discr,
- ref $($mutability)* switch_ty,
- values: _,
- ref targets } => {
+ TerminatorKind::SwitchInt {
+ discr,
+ switch_ty,
+ values: _,
+ targets
+ } => {
self.visit_operand(discr, source_location);
self.visit_ty(switch_ty, TyContext::Location(source_location));
- for &target in targets {
- self.visit_branch(block, target);
+ for target in targets {
+ self.visit_branch(block, *target);
}
}
TerminatorKind::Unreachable => {
}
- TerminatorKind::Drop { ref $($mutability)* location,
- target,
- unwind } => {
+ TerminatorKind::Drop {
+ location,
+ target,
+ unwind,
+ } => {
self.visit_place(
location,
PlaceContext::MutatingUse(MutatingUseContext::Drop),
source_location
);
- self.visit_branch(block, target);
+ self.visit_branch(block, *target);
unwind.map(|t| self.visit_branch(block, t));
}
- TerminatorKind::DropAndReplace { ref $($mutability)* location,
- ref $($mutability)* value,
- target,
- unwind } => {
+ TerminatorKind::DropAndReplace {
+ location,
+ value,
+ target,
+ unwind,
+ } => {
self.visit_place(
location,
PlaceContext::MutatingUse(MutatingUseContext::Drop),
source_location
);
self.visit_operand(value, source_location);
- self.visit_branch(block, target);
+ self.visit_branch(block, *target);
unwind.map(|t| self.visit_branch(block, t));
}
- TerminatorKind::Call { ref $($mutability)* func,
- ref $($mutability)* args,
- ref $($mutability)* destination,
- cleanup,
- from_hir_call: _, } => {
+ TerminatorKind::Call {
+ func,
+ args,
+ destination,
+ cleanup,
+ from_hir_call: _,
+ } => {
self.visit_operand(func, source_location);
for arg in args {
self.visit_operand(arg, source_location);
}
- if let Some((ref $($mutability)* destination, target)) = *destination {
+ if let Some((destination, target)) = destination {
self.visit_place(
destination,
PlaceContext::MutatingUse(MutatingUseContext::Call),
source_location
);
- self.visit_branch(block, target);
+ self.visit_branch(block, *target);
}
cleanup.map(|t| self.visit_branch(block, t));
}
- TerminatorKind::Assert { ref $($mutability)* cond,
- expected: _,
- ref $($mutability)* msg,
- target,
- cleanup } => {
+ TerminatorKind::Assert {
+ cond,
+ expected: _,
+ msg,
+ target,
+ cleanup,
+ } => {
self.visit_operand(cond, source_location);
self.visit_assert_message(msg, source_location);
- self.visit_branch(block, target);
+ self.visit_branch(block, *target);
cleanup.map(|t| self.visit_branch(block, t));
}
- TerminatorKind::Yield { ref $($mutability)* value,
- resume,
- drop } => {
+ TerminatorKind::Yield {
+ value,
+ resume,
+ drop,
+ } => {
self.visit_operand(value, source_location);
- self.visit_branch(block, resume);
+ self.visit_branch(block, *resume);
drop.map(|t| self.visit_branch(block, t));
}
- TerminatorKind::FalseEdges { real_target, ref imaginary_targets} => {
- self.visit_branch(block, real_target);
+ TerminatorKind::FalseEdges { real_target, imaginary_targets } => {
+ self.visit_branch(block, *real_target);
for target in imaginary_targets {
self.visit_branch(block, *target);
}
}
TerminatorKind::FalseUnwind { real_target, unwind } => {
- self.visit_branch(block, real_target);
+ self.visit_branch(block, *real_target);
if let Some(unwind) = unwind {
- self.visit_branch(block, unwind);
+ self.visit_branch(block, *unwind);
}
}
}
}
fn super_assert_message(&mut self,
- msg: & $($mutability)* AssertMessage<'tcx>,
+ msg: & $($mutability)? AssertMessage<'tcx>,
location: Location) {
use crate::mir::interpret::EvalErrorKind::*;
- if let BoundsCheck {
- ref $($mutability)* len,
- ref $($mutability)* index
- } = *msg {
+ if let BoundsCheck { len, index } = msg {
self.visit_operand(len, location);
self.visit_operand(index, location);
}
}
fn super_rvalue(&mut self,
- rvalue: & $($mutability)* Rvalue<'tcx>,
+ rvalue: & $($mutability)? Rvalue<'tcx>,
location: Location) {
- match *rvalue {
- Rvalue::Use(ref $($mutability)* operand) => {
+ match rvalue {
+ Rvalue::Use(operand) => {
self.visit_operand(operand, location);
}
- Rvalue::Repeat(ref $($mutability)* value, _) => {
+ Rvalue::Repeat(value, _) => {
self.visit_operand(value, location);
}
- Rvalue::Ref(ref $($mutability)* r, bk, ref $($mutability)* path) => {
+ Rvalue::Ref(r, bk, path) => {
self.visit_region(r, location);
let ctx = match bk {
BorrowKind::Shared => PlaceContext::NonMutatingUse(
self.visit_place(path, ctx, location);
}
- Rvalue::Len(ref $($mutability)* path) => {
+ Rvalue::Len(path) => {
self.visit_place(
path,
PlaceContext::NonMutatingUse(NonMutatingUseContext::Inspect),
);
}
- Rvalue::Cast(_cast_kind,
- ref $($mutability)* operand,
- ref $($mutability)* ty) => {
+ Rvalue::Cast(_cast_kind, operand, ty) => {
self.visit_operand(operand, location);
self.visit_ty(ty, TyContext::Location(location));
}
- Rvalue::BinaryOp(_bin_op,
- ref $($mutability)* lhs,
- ref $($mutability)* rhs) |
- Rvalue::CheckedBinaryOp(_bin_op,
- ref $($mutability)* lhs,
- ref $($mutability)* rhs) => {
+ Rvalue::BinaryOp(_bin_op, lhs, rhs)
+ | Rvalue::CheckedBinaryOp(_bin_op, lhs, rhs) => {
self.visit_operand(lhs, location);
self.visit_operand(rhs, location);
}
- Rvalue::UnaryOp(_un_op, ref $($mutability)* op) => {
+ Rvalue::UnaryOp(_un_op, op) => {
self.visit_operand(op, location);
}
- Rvalue::Discriminant(ref $($mutability)* place) => {
+ Rvalue::Discriminant(place) => {
self.visit_place(
place,
PlaceContext::NonMutatingUse(NonMutatingUseContext::Inspect),
);
}
- Rvalue::NullaryOp(_op, ref $($mutability)* ty) => {
+ Rvalue::NullaryOp(_op, ty) => {
self.visit_ty(ty, TyContext::Location(location));
}
- Rvalue::Aggregate(ref $($mutability)* kind,
- ref $($mutability)* operands) => {
- let kind = &$($mutability)* **kind;
- match *kind {
- AggregateKind::Array(ref $($mutability)* ty) => {
+ Rvalue::Aggregate(kind, operands) => {
+ let kind = &$($mutability)? **kind;
+ match kind {
+ AggregateKind::Array(ty) => {
self.visit_ty(ty, TyContext::Location(location));
}
AggregateKind::Tuple => {
}
- AggregateKind::Adt(_adt_def,
- _variant_index,
- ref $($mutability)* substs,
- _user_substs,
- _active_field_index) => {
+ AggregateKind::Adt(
+ _adt_def,
+ _variant_index,
+ substs,
+ _user_substs,
+ _active_field_index
+ ) => {
self.visit_substs(substs, location);
}
- AggregateKind::Closure(ref $($mutability)* def_id,
- ref $($mutability)* closure_substs) => {
+ AggregateKind::Closure(
+ def_id,
+ closure_substs
+ ) => {
self.visit_def_id(def_id, location);
self.visit_closure_substs(closure_substs, location);
}
- AggregateKind::Generator(ref $($mutability)* def_id,
- ref $($mutability)* generator_substs,
- _movability) => {
+ AggregateKind::Generator(
+ def_id,
+ generator_substs,
+ _movability,
+ ) => {
self.visit_def_id(def_id, location);
self.visit_generator_substs(generator_substs, location);
}
}
fn super_operand(&mut self,
- operand: & $($mutability)* Operand<'tcx>,
+ operand: & $($mutability)? Operand<'tcx>,
location: Location) {
- match *operand {
- Operand::Copy(ref $($mutability)* place) => {
+ match operand {
+ Operand::Copy(place) => {
self.visit_place(
place,
PlaceContext::NonMutatingUse(NonMutatingUseContext::Copy),
location
);
}
- Operand::Move(ref $($mutability)* place) => {
+ Operand::Move(place) => {
self.visit_place(
place,
PlaceContext::NonMutatingUse(NonMutatingUseContext::Move),
location
);
}
- Operand::Constant(ref $($mutability)* constant) => {
+ Operand::Constant(constant) => {
self.visit_constant(constant, location);
}
}
}
fn super_ascribe_user_ty(&mut self,
- place: & $($mutability)* Place<'tcx>,
- _variance: & $($mutability)* ty::Variance,
- user_ty: & $($mutability)* UserTypeProjection<'tcx>,
+ place: & $($mutability)? Place<'tcx>,
+ _variance: & $($mutability)? ty::Variance,
+ user_ty: & $($mutability)? UserTypeProjection<'tcx>,
location: Location) {
self.visit_place(
place,
}
fn super_retag(&mut self,
- _kind: & $($mutability)* RetagKind,
- place: & $($mutability)* Place<'tcx>,
+ _kind: & $($mutability)? RetagKind,
+ place: & $($mutability)? Place<'tcx>,
location: Location) {
self.visit_place(
place,
}
fn super_place(&mut self,
- place: & $($mutability)* Place<'tcx>,
+ place: & $($mutability)? Place<'tcx>,
context: PlaceContext<'tcx>,
location: Location) {
- match *place {
- Place::Local(ref $($mutability)* local) => {
+ match place {
+ Place::Local(local) => {
self.visit_local(local, context, location);
}
- Place::Static(ref $($mutability)* static_) => {
+ Place::Static(static_) => {
self.visit_static(static_, context, location);
}
- Place::Promoted(ref $($mutability)* promoted) => {
- self.visit_ty(& $($mutability)* promoted.1, TyContext::Location(location));
+ Place::Promoted(promoted) => {
+ self.visit_ty(& $($mutability)? promoted.1, TyContext::Location(location));
},
- Place::Projection(ref $($mutability)* proj) => {
+ Place::Projection(proj) => {
self.visit_projection(proj, context, location);
}
}
}
fn super_static(&mut self,
- static_: & $($mutability)* Static<'tcx>,
+ static_: & $($mutability)? Static<'tcx>,
_context: PlaceContext<'tcx>,
location: Location) {
- let Static {
- ref $($mutability)* def_id,
- ref $($mutability)* ty,
- } = *static_;
+ let Static { def_id, ty } = static_;
self.visit_def_id(def_id, location);
self.visit_ty(ty, TyContext::Location(location));
}
fn super_projection(&mut self,
- proj: & $($mutability)* PlaceProjection<'tcx>,
+ proj: & $($mutability)? PlaceProjection<'tcx>,
context: PlaceContext<'tcx>,
location: Location) {
- let Projection {
- ref $($mutability)* base,
- ref $($mutability)* elem,
- } = *proj;
+ let Projection { base, elem } = proj;
let context = if context.is_mutating_use() {
PlaceContext::MutatingUse(MutatingUseContext::Projection)
} else {
}
fn super_projection_elem(&mut self,
- proj: & $($mutability)* PlaceElem<'tcx>,
+ proj: & $($mutability)? PlaceElem<'tcx>,
location: Location) {
- match *proj {
+ match proj {
ProjectionElem::Deref => {
}
ProjectionElem::Subslice { from: _, to: _ } => {
}
- ProjectionElem::Field(_field, ref $($mutability)* ty) => {
+ ProjectionElem::Field(_field, ty) => {
self.visit_ty(ty, TyContext::Location(location));
}
- ProjectionElem::Index(ref $($mutability)* local) => {
+ ProjectionElem::Index(local) => {
self.visit_local(
local,
PlaceContext::NonMutatingUse(NonMutatingUseContext::Copy),
fn super_local_decl(&mut self,
local: Local,
- local_decl: & $($mutability)* LocalDecl<'tcx>) {
+ local_decl: & $($mutability)? LocalDecl<'tcx>) {
let LocalDecl {
mutability: _,
- ref $($mutability)* ty,
- ref $($mutability)* user_ty,
+ ty,
+ user_ty,
name: _,
- ref $($mutability)* source_info,
- ref $($mutability)* visibility_scope,
+ source_info,
+ visibility_scope,
internal: _,
is_user_variable: _,
is_block_tail: _,
- } = *local_decl;
+ } = local_decl;
self.visit_ty(ty, TyContext::LocalDecl {
local,
source_info: *source_info,
});
- for (user_ty, _) in & $($mutability)* user_ty.contents {
+ for (user_ty, _) in & $($mutability)? user_ty.contents {
self.visit_user_type_projection(user_ty);
}
self.visit_source_info(source_info);
}
fn super_source_scope(&mut self,
- _scope: & $($mutability)* SourceScope) {
+ _scope: & $($mutability)? SourceScope) {
}
fn super_branch(&mut self,
}
fn super_constant(&mut self,
- constant: & $($mutability)* Constant<'tcx>,
+ constant: & $($mutability)? Constant<'tcx>,
location: Location) {
let Constant {
- ref $($mutability)* span,
- ref $($mutability)* ty,
- ref $($mutability)* user_ty,
- ref $($mutability)* literal,
- } = *constant;
+ span,
+ ty,
+ user_ty,
+ literal,
+ } = constant;
self.visit_span(span);
self.visit_ty(ty, TyContext::Location(location));
self.visit_const(literal, location);
}
- fn super_def_id(&mut self, _def_id: & $($mutability)* DefId) {
+ fn super_def_id(&mut self, _def_id: & $($mutability)? DefId) {
}
- fn super_span(&mut self, _span: & $($mutability)* Span) {
+ fn super_span(&mut self, _span: & $($mutability)? Span) {
}
- fn super_source_info(&mut self, source_info: & $($mutability)* SourceInfo) {
+ fn super_source_info(&mut self, source_info: & $($mutability)? SourceInfo) {
let SourceInfo {
- ref $($mutability)* span,
- ref $($mutability)* scope,
- } = *source_info;
+ span,
+ scope,
+ } = source_info;
self.visit_span(span);
self.visit_source_scope(scope);
fn super_user_type_projection(
&mut self,
- _ty: & $($mutability)* UserTypeProjection<'tcx>,
+ _ty: & $($mutability)? UserTypeProjection<'tcx>,
) {
}
fn super_user_type_annotation(
&mut self,
_index: UserTypeAnnotationIndex,
- ty: & $($mutability)* CanonicalUserTypeAnnotation<'tcx>,
+ ty: & $($mutability)? CanonicalUserTypeAnnotation<'tcx>,
) {
- self.visit_span(& $($mutability)* ty.span);
- self.visit_ty(& $($mutability)* ty.inferred_ty, TyContext::UserTy(ty.span));
+ self.visit_span(& $($mutability)? ty.span);
+ self.visit_ty(& $($mutability)? ty.inferred_ty, TyContext::UserTy(ty.span));
}
- fn super_ty(&mut self, _ty: & $($mutability)* Ty<'tcx>) {
+ fn super_ty(&mut self, _ty: & $($mutability)? Ty<'tcx>) {
}
- fn super_region(&mut self, _region: & $($mutability)* ty::Region<'tcx>) {
+ fn super_region(&mut self, _region: & $($mutability)? ty::Region<'tcx>) {
}
- fn super_const(&mut self, _const: & $($mutability)* &'tcx ty::LazyConst<'tcx>) {
+ fn super_const(&mut self, _const: & $($mutability)? &'tcx ty::LazyConst<'tcx>) {
}
- fn super_substs(&mut self, _substs: & $($mutability)* &'tcx Substs<'tcx>) {
+ fn super_substs(&mut self, _substs: & $($mutability)? &'tcx Substs<'tcx>) {
}
fn super_generator_substs(&mut self,
- _substs: & $($mutability)* GeneratorSubsts<'tcx>) {
+ _substs: & $($mutability)? GeneratorSubsts<'tcx>) {
}
fn super_closure_substs(&mut self,
- _substs: & $($mutability)* ClosureSubsts<'tcx>) {
+ _substs: & $($mutability)? ClosureSubsts<'tcx>) {
}
// Convenience methods
- fn visit_location(&mut self, mir: & $($mutability)* Mir<'tcx>, location: Location) {
- let basic_block = & $($mutability)* mir[location.block];
+ fn visit_location(&mut self, mir: & $($mutability)? Mir<'tcx>, location: Location) {
+ let basic_block = & $($mutability)? mir[location.block];
if basic_block.statements.len() == location.statement_index {
- if let Some(ref $($mutability)* terminator) = basic_block.terminator {
+ if let Some(ref $($mutability)? terminator) = basic_block.terminator {
self.visit_terminator(location.block, terminator, location)
}
} else {
- let statement = & $($mutability)*
+ let statement = & $($mutability)?
basic_block.statements[location.statement_index];
self.visit_statement(location.block, statement, location)
}
}).0
}
- /// Flattens multiple binding levels into one. So `for<'a> for<'b> Foo`
- /// becomes `for<'a,'b> Foo`.
- pub fn flatten_late_bound_regions<T>(self, bound2_value: &Binder<Binder<T>>)
- -> Binder<T>
- where T: TypeFoldable<'tcx>
- {
- let bound0_value = bound2_value.skip_binder().skip_binder();
- let value = self.fold_regions(bound0_value, &mut false, |region, current_depth| {
- match *region {
- ty::ReLateBound(debruijn, br) => {
- // We assume no regions bound *outside* of the
- // binders in `bound2_value` (nmatsakis added in
- // the course of this PR; seems like a reasonable
- // sanity check though).
- assert!(debruijn == current_depth);
- self.mk_region(ty::ReLateBound(current_depth, br))
- }
- _ => {
- region
- }
- }
- });
- Binder::bind(value)
- }
-
/// Returns a set of all late-bound regions that are constrained
/// by `value`, meaning that if we instantiate those LBR with
/// variables and equate `value` with something else, those
call_site: item.span, // use the call site of the static
def_site: None,
format: MacroAttribute(Symbol::intern(name)),
- allow_internal_unstable: true,
+ allow_internal_unstable: Some(vec![
+ Symbol::intern("rustc_attrs"),
+ ].into()),
allow_internal_unsafe: false,
local_inner_macros: false,
edition: hygiene::default_edition(),
+#![feature(nll)]
#![feature(rustc_private)]
#![deny(rust_2018_idioms)]
#![forbid(unsafe_code)]
#![deny(rust_2018_idioms)]
+#![feature(nll)]
#![feature(try_from)]
// See librustc_cratesio_shim/Cargo.toml for a comment explaining this.
#[allow(unused_extern_crates)]
#![sanitizer_runtime]
+#![feature(nll)]
#![feature(sanitizer_runtime)]
#![feature(staged_api)]
#![no_std]
let cwd = env::current_dir().unwrap();
let mut lib = fs::canonicalize(&cwd.join(lib)).unwrap_or_else(|_| cwd.join(lib));
- lib.pop();
+ lib.pop(); // strip filename
let mut output = cwd.join(&config.out_filename);
- output.pop();
+ output.pop(); // strip filename
let output = fs::canonicalize(&output).unwrap_or(output);
let relative = path_relative_from(&lib, &output).unwrap_or_else(||
panic!("couldn't create relative path from {:?} to {:?}", output, lib));
authors = ["The Rust Project Developers"]
name = "rustc_codegen_ssa"
version = "0.0.0"
+edition = "2018"
[lib]
name = "rustc_codegen_ssa"
use rustc::hir::def_id::CrateNum;
use super::command::Command;
-use CrateInfo;
+use crate::CrateInfo;
use cc::windows_registry;
use std::fs;
use super::write::CodegenContext;
-use traits::*;
-use ModuleCodegen;
+use crate::traits::*;
+use crate::ModuleCodegen;
use rustc::util::time_graph::Timeline;
use rustc_errors::FatalError;
-use {ModuleCodegen, ModuleKind, CachedModuleCodegen, CompiledModule, CrateInfo, CodegenResults,
- RLIB_BYTECODE_EXTENSION};
+use crate::{ModuleCodegen, ModuleKind, CachedModuleCodegen, CompiledModule, CrateInfo,
+ CodegenResults, RLIB_BYTECODE_EXTENSION};
use super::linker::LinkerInfo;
use super::lto::{self, SerializedModule};
use super::link::{self, remove, get_linker};
use super::command::Command;
use super::symbol_export::ExportedSymbols;
-use memmap;
+use crate::traits::*;
use rustc_incremental::{copy_cgu_workproducts_to_incr_comp_cache_dir,
in_incr_comp_dir, in_incr_comp_dir_sess};
use rustc::dep_graph::{WorkProduct, WorkProductId, WorkProductFileKind};
use rustc::session::Session;
use rustc::util::nodemap::FxHashMap;
use rustc::util::time_graph::{self, TimeGraph, Timeline};
-use traits::*;
use rustc::hir::def_id::{CrateNum, LOCAL_CRATE};
use rustc::ty::TyCtxt;
use rustc::util::common::{time_depth, set_time_depth, print_time_passes_entry};
//! but one `llvm::Type` corresponds to many `Ty`s; for instance, `tup(int, int,
//! int)` and `rec(x=int, y=int, z=int)` will have the same `llvm::Type`.
-use {ModuleCodegen, ModuleKind, CachedModuleCodegen};
+use crate::{ModuleCodegen, ModuleKind, CachedModuleCodegen};
use rustc::dep_graph::cgu_reuse_tracker::CguReuse;
use rustc::hir::def_id::{CrateNum, DefId, LOCAL_CRATE};
use rustc::util::profiling::ProfileCategory;
use rustc::session::config::{self, EntryFnType, Lto};
use rustc::session::Session;
-use mir::place::PlaceRef;
-use back::write::{OngoingCodegen, start_async_codegen, submit_pre_lto_module_to_llvm,
- submit_post_lto_module_to_llvm};
-use {MemFlags, CrateInfo};
-use callee;
use rustc_mir::monomorphize::item::DefPathBasedNames;
-use common::{RealPredicate, TypeKind, IntPredicate};
-use meth;
-use mir;
use rustc::util::time_graph;
use rustc_mir::monomorphize::Instance;
use rustc_mir::monomorphize::partitioning::{CodegenUnit, CodegenUnitExt};
-use mono_item::MonoItem;
use rustc::util::nodemap::FxHashMap;
use rustc_data_structures::indexed_vec::Idx;
use rustc_data_structures::sync::Lrc;
use rustc_codegen_utils::{symbol_names_test, check_for_rustc_errors_attr};
use rustc::ty::layout::{FAT_PTR_ADDR, FAT_PTR_EXTRA};
+use crate::mir::place::PlaceRef;
+use crate::back::write::{OngoingCodegen, start_async_codegen, submit_pre_lto_module_to_llvm,
+ submit_post_lto_module_to_llvm};
+use crate::{MemFlags, CrateInfo};
+use crate::callee;
+use crate::common::{RealPredicate, TypeKind, IntPredicate};
+use crate::meth;
+use crate::mir;
+use crate::mono_item::MonoItem;
-use traits::*;
+use crate::traits::*;
use std::any::Any;
use std::cmp;
use syntax::attr;
use rustc::hir;
-use mir::operand::OperandValue;
+use crate::mir::operand::OperandValue;
use std::marker::PhantomData;
-use traits::*;
+use crate::traits::*;
use rustc::ty;
use rustc::ty::subst::Substs;
use rustc::hir::def_id::DefId;
use rustc::hir::def_id::DefId;
use rustc::middle::lang_items::LangItem;
-use base;
-use traits::*;
+use crate::base;
+use crate::traits::*;
use rustc::hir;
-use traits::BuilderMethods;
+use crate::traits::BuilderMethods;
pub fn type_needs_drop<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, ty: Ty<'tcx>) -> bool {
ty.needs_drop(tcx, ty::ParamEnv::reveal_all())
mod temp_stable_hash_impls {
use rustc_data_structures::stable_hasher::{StableHasherResult, StableHasher,
HashStable};
- use ModuleCodegen;
+ use crate::ModuleCodegen;
impl<HCX, M> HashStable<HCX> for ModuleCodegen<M> {
fn hash_stable<W: StableHasherResult>(&self,
//
// Code relating to drop glue.
-use std;
-
-use common::IntPredicate;
-use meth;
use rustc::ty::{self, Ty};
-use traits::*;
+use crate::common::IntPredicate;
+use crate::meth;
+use crate::traits::*;
pub fn size_and_align_of_dst<'a, 'tcx: 'a, Bx: BuilderMethods<'a, 'tcx>>(
bx: &mut Bx,
#![feature(nll)]
#![allow(unused_attributes)]
#![allow(dead_code)]
+#![deny(rust_2018_idioms)]
+#![allow(explicit_outlives_requirements)]
+#![allow(elided_lifetimes_in_paths)]
#![recursion_limit="256"]
//! The backend-agnostic functions of this crate use functions defined in various traits that
//! have to be implemented by each backends.
-#[macro_use] extern crate bitflags;
#[macro_use] extern crate log;
-extern crate rustc_apfloat;
-#[macro_use] extern crate rustc;
-extern crate rustc_target;
-extern crate rustc_mir;
+#[macro_use] extern crate rustc;
#[macro_use] extern crate syntax;
-extern crate syntax_pos;
-extern crate rustc_incremental;
-extern crate rustc_codegen_utils;
-extern crate rustc_data_structures;
-extern crate rustc_allocator;
-extern crate rustc_fs_util;
-extern crate serialize;
-extern crate rustc_errors;
-extern crate rustc_demangle;
-extern crate cc;
-extern crate libc;
-extern crate jobserver;
-extern crate memmap;
-extern crate num_cpus;
use std::path::PathBuf;
use rustc::dep_graph::WorkProduct;
Allocator,
}
-bitflags! {
+bitflags::bitflags! {
pub struct MemFlags: u8 {
const VOLATILE = 1 << 0;
const NONTEMPORAL = 1 << 1;
use rustc_target::abi::call::FnType;
-use callee;
use rustc_mir::monomorphize;
-use traits::*;
+use crate::callee;
+use crate::traits::*;
use rustc::ty::{self, Ty};
use rustc::ty;
use rustc::ty::layout::{LayoutOf, HasTyCtxt};
use super::FunctionCx;
-use traits::*;
+use crate::traits::*;
pub fn non_ssa_locals<'a, 'tcx: 'a, Bx: BuilderMethods<'a, 'tcx>>(
fx: &FunctionCx<'a, 'tcx, Bx>
use rustc::mir::interpret::EvalErrorKind;
use rustc_target::abi::call::{ArgType, FnType, PassMode};
use rustc_target::spec::abi::Abi;
-use base;
-use MemFlags;
-use common::{self, IntPredicate};
-use meth;
use rustc_mir::monomorphize;
+use crate::base;
+use crate::MemFlags;
+use crate::common::{self, IntPredicate};
+use crate::meth;
-use traits::*;
+use crate::traits::*;
use syntax::symbol::Symbol;
use syntax_pos::Pos;
use rustc::ty::{self, Ty};
use rustc::ty::layout;
use syntax::source_map::Span;
-use traits::*;
+use crate::traits::*;
use super::FunctionCx;
use rustc::mir::{self, Mir};
use rustc::ty::subst::Substs;
use rustc::session::config::DebugInfo;
-use base;
-use debuginfo::{self, VariableAccess, VariableKind, FunctionDebugContext};
use rustc_mir::monomorphize::Instance;
use rustc_target::abi::call::{FnType, PassMode};
-use traits::*;
+use crate::base;
+use crate::debuginfo::{self, VariableAccess, VariableKind, FunctionDebugContext};
+use crate::traits::*;
use syntax_pos::{DUMMY_SP, NO_EXPANSION, BytePos, Span};
use syntax::symbol::keywords;
use rustc::ty;
use rustc::ty::layout::{self, Align, LayoutOf, TyLayout};
-use base;
-use MemFlags;
-use glue;
+use crate::base;
+use crate::MemFlags;
+use crate::glue;
-use traits::*;
+use crate::traits::*;
use std::fmt;
use rustc::ty::layout::{self, Align, TyLayout, LayoutOf, VariantIdx, HasTyCtxt};
use rustc::mir;
use rustc::mir::tcx::PlaceTy;
-use MemFlags;
-use common::IntPredicate;
-use glue;
+use crate::MemFlags;
+use crate::common::IntPredicate;
+use crate::glue;
-use traits::*;
+use crate::traits::*;
use super::{FunctionCx, LocalRef};
use super::operand::OperandValue;
use rustc_apfloat::{ieee, Float, Status, Round};
use std::{u128, i128};
-use base;
-use MemFlags;
-use callee;
-use common::{self, RealPredicate, IntPredicate};
+use crate::base;
+use crate::MemFlags;
+use crate::callee;
+use crate::common::{self, RealPredicate, IntPredicate};
use rustc_mir::monomorphize;
-use traits::*;
+use crate::traits::*;
use super::{FunctionCx, LocalRef};
use super::operand::{OperandRef, OperandValue};
use rustc::mir;
-use traits::BuilderMethods;
+use crate::traits::BuilderMethods;
use super::FunctionCx;
use super::LocalRef;
use super::OperandValue;
-use traits::*;
+use crate::traits::*;
impl<'a, 'tcx: 'a, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
pub fn codegen_statement(
-use base;
use rustc::hir;
use rustc::hir::def::Def;
use rustc::mir::mono::{Linkage, Visibility};
use rustc::ty::layout::HasTyCtxt;
use std::fmt;
-use traits::*;
+use crate::base;
+use crate::traits::*;
pub use rustc::mir::mono::MonoItem;
use super::BackendTypes;
-use mir::place::PlaceRef;
+use crate::mir::place::PlaceRef;
use rustc::hir::{GlobalAsm, InlineAsm};
pub trait AsmBuilderMethods<'tcx>: BackendTypes {
use super::intrinsic::IntrinsicCallMethods;
use super::type_::ArgTypeMethods;
use super::{HasCodegen, StaticBuilderMethods};
-use common::{AtomicOrdering, AtomicRmwBinOp, IntPredicate, RealPredicate, SynchronizationScope};
-use mir::operand::OperandRef;
-use mir::place::PlaceRef;
+use crate::common::{AtomicOrdering, AtomicRmwBinOp, IntPredicate, RealPredicate,
+ SynchronizationScope};
+use crate::mir::operand::OperandRef;
+use crate::mir::place::PlaceRef;
+use crate::MemFlags;
use rustc::ty::Ty;
use rustc::ty::layout::{Align, Size};
use std::ffi::CStr;
-use MemFlags;
use std::borrow::Cow;
use std::ops::Range;
use super::BackendTypes;
-use mir::place::PlaceRef;
+use crate::mir::place::PlaceRef;
use rustc::mir::interpret::Allocation;
use rustc::mir::interpret::Scalar;
use rustc::ty::layout;
use super::BackendTypes;
-use debuginfo::{FunctionDebugContext, MirDebugScope, VariableAccess, VariableKind};
+use crate::debuginfo::{FunctionDebugContext, MirDebugScope, VariableAccess, VariableKind};
use rustc::hir::def_id::CrateNum;
use rustc::mir;
use rustc::ty::{self, Ty};
use super::BackendTypes;
-use mir::operand::OperandRef;
+use crate::mir::operand::OperandRef;
use rustc::ty::Ty;
use rustc_target::abi::call::FnType;
use syntax_pos::Span;
use super::misc::MiscMethods;
use super::Backend;
use super::HasCodegen;
-use common::{self, TypeKind};
-use mir::place::PlaceRef;
+use crate::common::{self, TypeKind};
+use crate::mir::place::PlaceRef;
use rustc::ty::layout::{self, Align, Size, TyLayout};
use rustc::ty::{self, Ty};
use rustc::util::nodemap::FxHashMap;
-use back::lto::{LtoModuleCodegen, SerializedModule, ThinModule};
-use back::write::{CodegenContext, ModuleConfig};
-use {CompiledModule, ModuleCodegen};
+use crate::back::lto::{LtoModuleCodegen, SerializedModule, ThinModule};
+use crate::back::write::{CodegenContext, ModuleConfig};
+use crate::{CompiledModule, ModuleCodegen};
use rustc::dep_graph::WorkProduct;
use rustc::util::time_graph::Timeline;
/// A simple static assertion macro. The first argument should be a unique
/// ALL_CAPS identifier that describes the condition.
#[macro_export]
-#[allow_internal_unstable]
+#[cfg_attr(stage0, allow_internal_unstable)]
+#[cfg_attr(not(stage0), allow_internal_unstable(type_ascription))]
macro_rules! static_assert {
($name:ident: $test:expr) => {
// Use the bool to access an array such that if the bool is false, the access
early_error(sopts.error_format, "no input filename given");
}
1 => panic!("make_input should have provided valid inputs"),
- _ => early_error(sopts.error_format, "multiple input filenames provided"),
+ _ =>
+ early_error(
+ sopts.error_format,
+ &format!(
+ "multiple input filenames provided (first two filenames are `{}` and `{}`)",
+ matches.free[0],
+ matches.free[1],
+ ),
+ )
}
}
#![allow(unused_attributes)]
#![feature(range_contains)]
#![cfg_attr(unix, feature(libc))]
+#![feature(nll)]
#![feature(optin_builtin_traits)]
#![deny(rust_2018_idioms)]
#![deny(rust_2018_idioms)]
+#![feature(nll)]
#![feature(static_nobundle)]
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/")]
#![sanitizer_runtime]
+#![feature(nll)]
#![feature(sanitizer_runtime)]
#![feature(staged_api)]
#![no_std]
ProcMacro::Bang { name, client } => {
(name, SyntaxExtension::ProcMacro {
expander: Box::new(BangProcMacro { client }),
- allow_internal_unstable: false,
+ allow_internal_unstable: None,
edition: root.edition,
})
}
let client = proc_macro::bridge::client::Client::expand1(proc_macro::quote);
let ext = SyntaxExtension::ProcMacro {
expander: Box::new(BangProcMacro { client }),
- allow_internal_unstable: true,
+ allow_internal_unstable: Some(vec![
+ Symbol::intern("proc_macro_def_site"),
+ ].into()),
edition: data.root.edition,
};
return LoadedMacro::ProcMacro(Lrc::new(ext));
let def_id = field.did;
debug!("IsolatedEncoder::encode_field({:?})", def_id);
- let variant_id = tcx.hir().as_local_node_id(variant.did).unwrap();
+ let variant_id = tcx.hir().as_local_hir_id(variant.did).unwrap();
let variant_data = tcx.hir().expect_variant_data(variant_id);
Entry {
format!("`{}` would have to be valid for `{}`...", name, region_name),
);
- if let Some(fn_node_id) = self.infcx.tcx.hir().as_local_node_id(self.mir_def_id) {
+ if let Some(fn_hir_id) = self.infcx.tcx.hir().as_local_hir_id(self.mir_def_id) {
err.span_label(
drop_span,
format!(
"...but `{}` will be dropped here, when the function `{}` returns",
name,
- self.infcx.tcx.hir().name(fn_node_id),
+ self.infcx.tcx.hir().name_by_hir_id(fn_hir_id),
),
);
let upvar_decl = &self.mir.upvar_decls[field.index()];
let upvar_hir_id =
upvar_decl.var_hir_id.assert_crate_local();
- let upvar_node_id =
- self.infcx.tcx.hir().hir_to_node_id(upvar_hir_id);
- let upvar_span = self.infcx.tcx.hir().span(upvar_node_id);
+ let upvar_span = self.infcx.tcx.hir().span_by_hir_id(
+ upvar_hir_id);
diag.span_label(upvar_span, "captured outer variable");
break;
}
use rustc::ty::{self, RegionKind, RegionVid, Ty, TyCtxt};
use rustc::util::ppaux::RegionHighlightMode;
use rustc_errors::DiagnosticBuilder;
-use syntax::ast::{Name, DUMMY_NODE_ID};
+use syntax::ast::Name;
use syntax::symbol::keywords;
use syntax_pos::Span;
use syntax_pos::symbol::InternedString;
name: &InternedString,
) -> Span {
let scope = error_region.free_region_binding_scope(tcx);
- let node = tcx.hir().as_local_node_id(scope).unwrap_or(DUMMY_NODE_ID);
+ let node = tcx.hir().as_local_hir_id(scope).unwrap_or(hir::DUMMY_HIR_ID);
- let span = tcx.sess.source_map().def_span(tcx.hir().span(node));
+ let span = tcx.sess.source_map().def_span(tcx.hir().span_by_hir_id(node));
if let Some(param) = tcx.hir()
.get_generics(scope)
.and_then(|generics| generics.get_named(name))
let (return_span, mir_description) = match tcx.hir().get(mir_node_id) {
hir::Node::Expr(hir::Expr {
- node: hir::ExprKind::Closure(_, _, _, span, gen_move),
+ node: hir::ExprKind::Closure(_, return_ty, _, span, gen_move),
..
}) => (
- tcx.sess.source_map().end_point(*span),
+ match return_ty.output {
+ hir::FunctionRetTy::DefaultReturn(_) => tcx.sess.source_map().end_point(*span),
+ hir::FunctionRetTy::Return(_) => return_ty.output.span(),
+ },
if gen_move.is_some() {
" of generator"
} else {
upvar_index: usize,
) -> (Symbol, Span) {
let upvar_hir_id = mir.upvar_decls[upvar_index].var_hir_id.assert_crate_local();
- let upvar_node_id = tcx.hir().hir_to_node_id(upvar_hir_id);
- debug!("get_upvar_name_and_span_for_region: upvar_node_id={:?}", upvar_node_id);
+ debug!("get_upvar_name_and_span_for_region: upvar_hir_id={:?}", upvar_hir_id);
- let upvar_name = tcx.hir().name(upvar_node_id);
- let upvar_span = tcx.hir().span(upvar_node_id);
+ let upvar_name = tcx.hir().name_by_hir_id(upvar_hir_id);
+ let upvar_span = tcx.hir().span_by_hir_id(upvar_hir_id);
debug!("get_upvar_name_and_span_for_region: upvar_name={:?} upvar_span={:?}",
upvar_name, upvar_span);
owner: fn_def_id.index,
local_id: *late_bound,
};
- let region_node_id = tcx.hir().hir_to_node_id(hir_id);
- let name = tcx.hir().name(region_node_id).as_interned_str();
- let region_def_id = tcx.hir().local_def_id(region_node_id);
+ let name = tcx.hir().name_by_hir_id(hir_id).as_interned_str();
+ let region_def_id = tcx.hir().local_def_id_from_hir_id(hir_id);
let liberated_region = tcx.mk_region(ty::ReFree(ty::FreeRegion {
scope: fn_def_id,
bound_region: ty::BoundRegion::BrNamed(region_def_id, name),
) => {
(*body_id, ty.span)
}
- Node::AnonConst(hir::AnonConst { body, id, .. }) => {
- (*body, tcx.hir().span(*id))
+ Node::AnonConst(hir::AnonConst { body, hir_id, .. }) => {
+ (*body, tcx.hir().span_by_hir_id(*hir_id))
}
_ => span_bug!(tcx.hir().span(id), "can't build MIR for {:?}", def_id),
let self_arg;
if let Some(ref fn_decl) = tcx.hir().fn_decl(owner_id) {
let ty_hir_id = fn_decl.inputs[index].hir_id;
- let ty_span = tcx.hir().span(tcx.hir().hir_to_node_id(ty_hir_id));
+ let ty_span = tcx.hir().span_by_hir_id(ty_hir_id);
opt_ty_info = Some(ty_span);
self_arg = if index == 0 && fn_decl.implicit_self.has_implicit_self() {
match fn_decl.implicit_self {
for (index, stmt) in stmts.iter().enumerate() {
let hir_id = stmt.hir_id;
let opt_dxn_ext = cx.region_scope_tree.opt_destruction_scope(hir_id.local_id);
- let stmt_span = StatementSpan(cx.tcx.hir().span(stmt.id));
+ let stmt_span = StatementSpan(cx.tcx.hir().span_by_hir_id(hir_id));
match stmt.node {
hir::StmtKind::Expr(ref expr) |
hir::StmtKind::Semi(ref expr) => {
use syntax::symbol::Symbol;
use rustc::ty;
-use rustc::ty::layout::{LayoutOf, Primitive};
+use rustc::ty::layout::{LayoutOf, Primitive, Size};
use rustc::mir::BinOp;
use rustc::mir::interpret::{
EvalResult, EvalErrorKind, Scalar,
self.binop_with_overflow(bin_op, lhs, rhs, dest)?;
}
}
+ "saturating_add" | "saturating_sub" => {
+ let l = self.read_immediate(args[0])?;
+ let r = self.read_immediate(args[1])?;
+ let is_add = intrinsic_name == "saturating_add";
+ let (val, overflowed) = self.binary_op_imm(if is_add {
+ BinOp::Add
+ } else {
+ BinOp::Sub
+ }, l, r)?;
+ let val = if overflowed {
+ let num_bits = l.layout.size.bits();
+ if l.layout.abi.is_signed() {
+ // For signed ints the saturated value depends on the sign of the first
+ // term since the sign of the second term can be inferred from this and
+ // the fact that the operation has overflowed (if either is 0 no
+ // overflow can occur)
+ let first_term: u128 = l.to_scalar()?.to_bits(l.layout.size)?;
+ let first_term_positive = first_term & (1 << (num_bits-1)) == 0;
+ if first_term_positive {
+ // Negative overflow not possible since the positive first term
+ // can only increase an (in range) negative term for addition
+ // or corresponding negated positive term for subtraction
+ Scalar::from_uint((1u128 << (num_bits - 1)) - 1, // max positive
+ Size::from_bits(num_bits))
+ } else {
+ // Positive overflow not possible for similar reason
+ // max negative
+ Scalar::from_uint(1u128 << (num_bits - 1), Size::from_bits(num_bits))
+ }
+ } else { // unsigned
+ if is_add {
+ // max unsigned
+ Scalar::from_uint(u128::max_value() >> (128 - num_bits),
+ Size::from_bits(num_bits))
+ } else { // underflow to 0
+ Scalar::from_uint(0u128, Size::from_bits(num_bits))
+ }
+ }
+ } else {
+ val
+ };
+ self.write_scalar(val, dest)?;
+ }
"unchecked_shl" | "unchecked_shr" => {
let l = self.read_immediate(args[0])?;
let r = self.read_immediate(args[1])?;
let src = match self.try_read_immediate(src)? {
Ok(src_val) => {
// Yay, we got a value that we can write directly.
+ // FIXME: Add a check to make sure that if `src` is indirect,
+ // it does not overlap with `dest`.
return self.write_immediate_no_validate(src_val, dest);
}
Err(mplace) => mplace,
self.memory.copy(
src_ptr, src_align,
dest_ptr, dest_align,
- dest.layout.size, false
+ dest.layout.size,
+ /*nonoverlapping*/ true,
)?;
Ok(())
}
macro_rules! make_value_visitor {
- ($visitor_trait_name:ident, $($mutability:ident)*) => {
+ ($visitor_trait_name:ident, $($mutability:ident)?) => {
// How to traverse a value and what to do when we are at the leaves.
pub trait $visitor_trait_name<'a, 'mir, 'tcx: 'mir+'a, M: Machine<'a, 'mir, 'tcx>>: Sized {
type V: Value<'a, 'mir, 'tcx, M>;
/// The visitor must have an `EvalContext` in it.
- fn ecx(&$($mutability)* self)
- -> &$($mutability)* EvalContext<'a, 'mir, 'tcx, M>;
+ fn ecx(&$($mutability)? self)
+ -> &$($mutability)? EvalContext<'a, 'mir, 'tcx, M>;
// Recursive actions, ready to be overloaded.
/// Visits the given value, dispatching as appropriate to more specialized visitors.
if recursion_depth > *tcx.sess.recursion_limit.get() {
let error = format!("reached the recursion limit while instantiating `{}`",
instance);
- if let Some(node_id) = tcx.hir().as_local_node_id(def_id) {
- tcx.sess.span_fatal(tcx.hir().span(node_id), &error);
+ if let Some(hir_id) = tcx.hir().as_local_hir_id(def_id) {
+ tcx.sess.span_fatal(tcx.hir().span_by_hir_id(hir_id), &error);
} else {
tcx.sess.fatal(&error);
}
let instance_name = instance.to_string();
let msg = format!("reached the type-length limit while instantiating `{:.64}...`",
instance_name);
- let mut diag = if let Some(node_id) = tcx.hir().as_local_node_id(instance.def_id()) {
- tcx.sess.struct_span_fatal(tcx.hir().span(node_id), &msg)
+ let mut diag = if let Some(hir_id) = tcx.hir().as_local_hir_id(instance.def_id()) {
+ tcx.sess.struct_span_fatal(tcx.hir().span_by_hir_id(hir_id), &msg)
} else {
tcx.sess.struct_fatal(&msg)
};
| "add_with_overflow"
| "sub_with_overflow"
| "mul_with_overflow"
+ | "saturating_add"
+ | "saturating_sub"
// no need to check feature gates, intrinsics are only callable
// from the libstd or with forever unstable feature gates
=> is_const_fn = true,
// Check `#[unstable]` const fns or `#[rustc_const_unstable]`
// functions without the feature gate active in this crate in
// order to report a better error message than the one below.
- if self.span.allows_unstable() {
+ if self.span.allows_unstable(&feature.as_str()) {
// `allow_internal_unstable` can make such calls stable.
is_const_fn = true;
} else {
| "overflowing_add" // ~> .wrapping_add
| "overflowing_sub" // ~> .wrapping_sub
| "overflowing_mul" // ~> .wrapping_mul
+ | "saturating_add" // ~> .saturating_add
+ | "saturating_sub" // ~> .saturating_sub
| "unchecked_shl" // ~> .wrapping_shl
| "unchecked_shr" // ~> .wrapping_shr
| "rotate_left" // ~> .rotate_left
#![sanitizer_runtime]
+#![feature(nll)]
#![feature(sanitizer_runtime)]
#![feature(staged_api)]
#![no_std]
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/")]
+#![feature(nll)]
#![feature(rustc_diagnostic_macros)]
#![recursion_limit="256"]
edition,
}
}
- IdentTT(ext, _, allow_internal_unstable) => {
- IdentTT(ext, Some(self.krate_span), allow_internal_unstable)
+ IdentTT { expander, span: _, allow_internal_unstable } => {
+ IdentTT { expander, span: Some(self.krate_span), allow_internal_unstable }
}
_ => extension,
}));
self.register_syntax_extension(Symbol::intern(name), NormalTT {
expander: Box::new(expander),
def_info: None,
- allow_internal_unstable: false,
+ allow_internal_unstable: None,
allow_internal_unsafe: false,
local_inner_macros: false,
unstable_feature: None,
#![deny(rust_2018_idioms)]
+#![feature(nll)]
#![feature(rustc_diagnostic_macros)]
#![recursion_limit="256"]
//
// Unused trait imports can't be checked until the method resolution. We save
// candidates here, and do the actual check in librustc_typeck/check_unused.rs.
+//
+// Checking for unused imports is split into three steps:
+//
+// - `UnusedImportCheckVisitor` walks the AST to find all the unused imports
+// inside of `UseTree`s, recording their `NodeId`s and grouping them by
+// the parent `use` item
+//
+// - `calc_unused_spans` then walks over all the `use` items marked in the
+// previous step to collect the spans associated with the `NodeId`s and to
+// calculate the spans that can be removed by rustfix; This is done in a
+// separate step to be able to collapse the adjacent spans that rustfix
+// will remove
+//
+// - `check_crate` finally emits the diagnostics based on the data generated
+// in the last step
use std::ops::{Deref, DerefMut};
use crate::Resolver;
use crate::resolve_imports::ImportDirectiveSubclass;
-use rustc::{lint, ty};
use rustc::util::nodemap::NodeMap;
+use rustc::{lint, ty};
+use rustc_data_structures::fx::FxHashSet;
use syntax::ast;
use syntax::visit::{self, Visitor};
use syntax_pos::{Span, MultiSpan, DUMMY_SP};
+struct UnusedImport<'a> {
+ use_tree: &'a ast::UseTree,
+ use_tree_id: ast::NodeId,
+ item_span: Span,
+ unused: FxHashSet<ast::NodeId>,
+}
+
+impl<'a> UnusedImport<'a> {
+ fn add(&mut self, id: ast::NodeId) {
+ self.unused.insert(id);
+ }
+}
struct UnusedImportCheckVisitor<'a, 'b: 'a> {
resolver: &'a mut Resolver<'b>,
/// All the (so far) unused imports, grouped path list
- unused_imports: NodeMap<NodeMap<Span>>,
+ unused_imports: NodeMap<UnusedImport<'a>>,
+ base_use_tree: Option<&'a ast::UseTree>,
base_id: ast::NodeId,
item_span: Span,
}
impl<'a, 'b> UnusedImportCheckVisitor<'a, 'b> {
// We have information about whether `use` (import) directives are actually
// used now. If an import is not used at all, we signal a lint error.
- fn check_import(&mut self, item_id: ast::NodeId, id: ast::NodeId, span: Span) {
+ fn check_import(&mut self, id: ast::NodeId) {
let mut used = false;
self.per_ns(|this, ns| used |= this.used_imports.contains(&(id, ns)));
if !used {
// Check later.
return;
}
- self.unused_imports.entry(item_id).or_default().insert(id, span);
+ self.unused_import(self.base_id).add(id);
} else {
// This trait import is definitely used, in a way other than
// method resolution.
self.maybe_unused_trait_imports.remove(&id);
- if let Some(i) = self.unused_imports.get_mut(&item_id) {
- i.remove(&id);
+ if let Some(i) = self.unused_imports.get_mut(&self.base_id) {
+ i.unused.remove(&id);
}
}
}
+
+ fn unused_import(&mut self, id: ast::NodeId) -> &mut UnusedImport<'a> {
+ let use_tree_id = self.base_id;
+ let use_tree = self.base_use_tree.unwrap();
+ let item_span = self.item_span;
+
+ self.unused_imports
+ .entry(id)
+ .or_insert_with(|| UnusedImport {
+ use_tree,
+ use_tree_id,
+ item_span,
+ unused: FxHashSet::default(),
+ })
+ }
}
impl<'a, 'b> Visitor<'a> for UnusedImportCheckVisitor<'a, 'b> {
// This allows the grouping of all the lints in the same item
if !nested {
self.base_id = id;
+ self.base_use_tree = Some(use_tree);
}
if let ast::UseTreeKind::Nested(ref items) = use_tree.kind {
- // If it's the parent group, cover the entire use item
- let span = if nested {
- use_tree.span
- } else {
- self.item_span
- };
-
if items.is_empty() {
- self.unused_imports
- .entry(self.base_id)
- .or_default()
- .insert(id, span);
+ self.unused_import(self.base_id).add(id);
}
} else {
- let base_id = self.base_id;
- self.check_import(base_id, id, use_tree.span);
+ self.check_import(id);
}
visit::walk_use_tree(self, use_tree, id);
}
}
+enum UnusedSpanResult {
+ Used,
+ FlatUnused(Span, Span),
+ NestedFullUnused(Vec<Span>, Span),
+ NestedPartialUnused(Vec<Span>, Vec<Span>),
+}
+
+fn calc_unused_spans(
+ unused_import: &UnusedImport<'_>,
+ use_tree: &ast::UseTree,
+ use_tree_id: ast::NodeId,
+) -> UnusedSpanResult {
+ // The full span is the whole item's span if this current tree is not nested inside another
+ // This tells rustfix to remove the whole item if all the imports are unused
+ let full_span = if unused_import.use_tree.span == use_tree.span {
+ unused_import.item_span
+ } else {
+ use_tree.span
+ };
+ match use_tree.kind {
+ ast::UseTreeKind::Simple(..) | ast::UseTreeKind::Glob => {
+ if unused_import.unused.contains(&use_tree_id) {
+ UnusedSpanResult::FlatUnused(use_tree.span, full_span)
+ } else {
+ UnusedSpanResult::Used
+ }
+ }
+ ast::UseTreeKind::Nested(ref nested) => {
+ if nested.len() == 0 {
+ return UnusedSpanResult::FlatUnused(use_tree.span, full_span);
+ }
+
+ let mut unused_spans = Vec::new();
+ let mut to_remove = Vec::new();
+ let mut all_nested_unused = true;
+ let mut previous_unused = false;
+ for (pos, (use_tree, use_tree_id)) in nested.iter().enumerate() {
+ let remove = match calc_unused_spans(unused_import, use_tree, *use_tree_id) {
+ UnusedSpanResult::Used => {
+ all_nested_unused = false;
+ None
+ }
+ UnusedSpanResult::FlatUnused(span, remove) => {
+ unused_spans.push(span);
+ Some(remove)
+ }
+ UnusedSpanResult::NestedFullUnused(mut spans, remove) => {
+ unused_spans.append(&mut spans);
+ Some(remove)
+ }
+ UnusedSpanResult::NestedPartialUnused(mut spans, mut to_remove_extra) => {
+ all_nested_unused = false;
+ unused_spans.append(&mut spans);
+ to_remove.append(&mut to_remove_extra);
+ None
+ }
+ };
+ if let Some(remove) = remove {
+ let remove_span = if nested.len() == 1 {
+ remove
+ } else if pos == nested.len() - 1 || !all_nested_unused {
+ // Delete everything from the end of the last import, to delete the
+ // previous comma
+ nested[pos - 1].0.span.shrink_to_hi().to(use_tree.span)
+ } else {
+ // Delete everything until the next import, to delete the trailing commas
+ use_tree.span.to(nested[pos + 1].0.span.shrink_to_lo())
+ };
+
+ // Try to collapse adjacent spans into a single one. This prevents all cases of
+ // overlapping removals, which are not supported by rustfix
+ if previous_unused && !to_remove.is_empty() {
+ let previous = to_remove.pop().unwrap();
+ to_remove.push(previous.to(remove_span));
+ } else {
+ to_remove.push(remove_span);
+ }
+ }
+ previous_unused = remove.is_some();
+ }
+ if unused_spans.is_empty() {
+ UnusedSpanResult::Used
+ } else if all_nested_unused {
+ UnusedSpanResult::NestedFullUnused(unused_spans, full_span)
+ } else {
+ UnusedSpanResult::NestedPartialUnused(unused_spans, to_remove)
+ }
+ }
+ }
+}
+
pub fn check_crate(resolver: &mut Resolver<'_>, krate: &ast::Crate) {
for directive in resolver.potentially_unused_imports.iter() {
match directive.subclass {
let mut visitor = UnusedImportCheckVisitor {
resolver,
unused_imports: Default::default(),
+ base_use_tree: None,
base_id: ast::DUMMY_NODE_ID,
item_span: DUMMY_SP,
};
visit::walk_crate(&mut visitor, krate);
- for (id, spans) in &visitor.unused_imports {
+ for unused in visitor.unused_imports.values() {
+ let mut fixes = Vec::new();
+ let mut spans = match calc_unused_spans(unused, unused.use_tree, unused.use_tree_id) {
+ UnusedSpanResult::Used => continue,
+ UnusedSpanResult::FlatUnused(span, remove) => {
+ fixes.push((remove, String::new()));
+ vec![span]
+ }
+ UnusedSpanResult::NestedFullUnused(spans, remove) => {
+ fixes.push((remove, String::new()));
+ spans
+ }
+ UnusedSpanResult::NestedPartialUnused(spans, remove) => {
+ for fix in &remove {
+ fixes.push((*fix, String::new()));
+ }
+ spans
+ }
+ };
+
let len = spans.len();
- let mut spans = spans.values().cloned().collect::<Vec<Span>>();
spans.sort();
let ms = MultiSpan::from_spans(spans.clone());
let mut span_snippets = spans.iter()
} else {
String::new()
});
- visitor.session.buffer_lint(lint::builtin::UNUSED_IMPORTS, *id, ms, &msg);
+
+ let fix_msg = if fixes.len() == 1 && fixes[0].0 == unused.item_span {
+ "remove the whole `use` item"
+ } else if spans.len() > 1 {
+ "remove the unused imports"
+ } else {
+ "remove the unused import"
+ };
+
+ visitor.session.buffer_lint_with_diagnostic(
+ lint::builtin::UNUSED_IMPORTS,
+ unused.use_tree_id,
+ ms,
+ &msg,
+ lint::builtin::BuiltinLintDiagnostics::UnusedImports(fix_msg.into(), fixes),
+ );
}
}
#![feature(crate_visibility_modifier)]
#![feature(label_break_value)]
+#![feature(nll)]
#![feature(rustc_diagnostic_macros)]
#![feature(slice_sort_by_cached_key)]
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/")]
#![feature(custom_attribute)]
+#![feature(nll)]
#![deny(rust_2018_idioms)]
#![allow(unused_attributes)]
#![sanitizer_runtime]
+#![feature(nll)]
#![feature(sanitizer_runtime)]
#![feature(staged_api)]
#![no_std]
authors = ["The Rust Project Developers"]
name = "rustc_typeck"
version = "0.0.0"
+edition = "2018"
[lib]
name = "rustc_typeck"
log = "0.4"
rustc = { path = "../librustc" }
rustc_data_structures = { path = "../librustc_data_structures" }
-rustc_errors = { path = "../librustc_errors" }
+errors = { path = "../librustc_errors", package = "rustc_errors" }
rustc_target = { path = "../librustc_target" }
smallvec = { version = "0.6.7", features = ["union", "may_dangle"] }
syntax = { path = "../libsyntax" }
//! instance of `AstConv`.
use errors::{Applicability, DiagnosticId};
-use hir::{self, GenericArg, GenericArgs};
-use hir::def::Def;
-use hir::def_id::DefId;
-use hir::HirVec;
-use lint;
-use middle::resolve_lifetime as rl;
-use namespace::Namespace;
+use crate::hir::{self, GenericArg, GenericArgs};
+use crate::hir::def::Def;
+use crate::hir::def_id::DefId;
+use crate::hir::HirVec;
+use crate::lint;
+use crate::middle::resolve_lifetime as rl;
+use crate::namespace::Namespace;
use rustc::lint::builtin::AMBIGUOUS_ASSOCIATED_ITEMS;
use rustc::traits;
use rustc::ty::{self, Ty, TyCtxt, ToPredicate, TypeFoldable};
use rustc::ty::wf::object_region_bounds;
use rustc_data_structures::sync::Lrc;
use rustc_target::spec::abi;
-use require_c_abi_if_variadic;
+use crate::require_c_abi_if_variadic;
use smallvec::SmallVec;
use syntax::ast;
use syntax::feature_gate::{GateIssue, emit_feature_err};
use syntax::ptr::P;
use syntax::util::lev_distance::find_best_match_for_name;
use syntax_pos::{DUMMY_SP, Span, MultiSpan};
-use util::common::ErrorReported;
-use util::nodemap::FxHashMap;
+use crate::util::common::ErrorReported;
+use crate::util::nodemap::FxHashMap;
use std::collections::BTreeSet;
use std::iter;
{
let tcx = self.tcx();
let lifetime_name = |def_id| {
- tcx.hir().name(tcx.hir().as_local_node_id(def_id).unwrap()).as_interned_str()
+ tcx.hir().name_by_hir_id(tcx.hir().as_local_hir_id(def_id).unwrap()).as_interned_str()
};
let r = match tcx.named_region(lifetime.hir_id) {
assert_eq!(opt_self_ty, None);
self.prohibit_generics(&path.segments);
- let node_id = tcx.hir().as_local_node_id(did).unwrap();
- let item_id = tcx.hir().get_parent_node(node_id);
- let item_def_id = tcx.hir().local_def_id(item_id);
+ let hir_id = tcx.hir().as_local_hir_id(did).unwrap();
+ let item_id = tcx.hir().get_parent_node_by_hir_id(hir_id);
+ let item_def_id = tcx.hir().local_def_id_from_hir_id(item_id);
let generics = tcx.generics_of(item_def_id);
- let index = generics.param_def_id_to_index[&tcx.hir().local_def_id(node_id)];
- tcx.mk_ty_param(index, tcx.hir().name(node_id).as_interned_str())
+ let index = generics.param_def_id_to_index[
+ &tcx.hir().local_def_id_from_hir_id(hir_id)];
+ tcx.mk_ty_param(index, tcx.hir().name_by_hir_id(hir_id).as_interned_str())
}
Def::SelfTy(_, Some(def_id)) => {
// `Self` in impl (we know the concrete type).
-use check::{FnCtxt, Expectation, Diverges, Needs};
-use check::coercion::CoerceMany;
+use crate::check::{FnCtxt, Expectation, Diverges, Needs};
+use crate::check::coercion::CoerceMany;
+use crate::util::nodemap::FxHashMap;
use errors::Applicability;
use rustc::hir::{self, PatKind};
use rustc::hir::def::{Def, CtorKind};
use syntax::ptr::P;
use syntax::util::lev_distance::find_best_match_for_name;
use syntax_pos::Span;
-use util::nodemap::FxHashMap;
use std::collections::hash_map::Entry::{Occupied, Vacant};
use std::cmp;
use super::FnCtxt;
use errors::{DiagnosticBuilder,Applicability};
-use hir::def_id::DefId;
-use lint;
+use crate::hir::def_id::DefId;
+use crate::lint;
use rustc::hir;
use rustc::session::Session;
use rustc::traits;
use rustc::middle::lang_items;
use syntax::ast;
use syntax_pos::Span;
-use util::common::ErrorReported;
+use crate::util::common::ErrorReported;
/// Reifies a cast check to be checked once we have full type information for
/// a function context.
.emit();
}
CastError::SizedUnsizedCast => {
- use structured_errors::{SizedUnsizedCastError, StructuredDiagnostic};
+ use crate::structured_errors::{SizedUnsizedCastError, StructuredDiagnostic};
SizedUnsizedCastError::new(&fcx.tcx.sess,
self.span,
self.expr_ty,
use super::{check_fn, Expectation, FnCtxt, GeneratorTypes};
-use astconv::AstConv;
-use middle::region;
+use crate::astconv::AstConv;
+use crate::middle::region;
use rustc::hir::def_id::DefId;
use rustc::infer::{InferOk, InferResult};
use rustc::infer::LateBoundRegionConversionTime;
//! sort of a minor point so I've opted to leave it for later -- after all,
//! we may want to adjust precisely when coercions occur.
-use check::{FnCtxt, Needs};
+use crate::check::{FnCtxt, Needs};
use errors::DiagnosticBuilder;
use rustc::hir;
use rustc::hir::def_id::DefId;
in impl_m_type_params.zip(trait_m_type_params)
{
if impl_synthetic != trait_synthetic {
- let impl_node_id = tcx.hir().as_local_node_id(impl_def_id).unwrap();
- let impl_span = tcx.hir().span(impl_node_id);
+ let impl_hir_id = tcx.hir().as_local_hir_id(impl_def_id).unwrap();
+ let impl_span = tcx.hir().span_by_hir_id(impl_hir_id);
let trait_span = tcx.def_span(trait_def_id);
let mut err = struct_span_err!(tcx.sess,
impl_span,
match param.kind {
GenericParamKind::Lifetime { .. } => None,
GenericParamKind::Type { .. } => {
- if param.id == impl_node_id {
+ if param.hir_id == impl_hir_id {
Some(¶m.bounds)
} else {
None
-use check::FnCtxt;
+use crate::check::FnCtxt;
use rustc::infer::InferOk;
use rustc::traits::{ObligationCause, ObligationCauseCode};
-use check::regionck::RegionCtxt;
+use crate::check::regionck::RegionCtxt;
-use hir::def_id::DefId;
+use crate::hir::def_id::DefId;
use rustc::infer::outlives::env::OutlivesEnvironment;
use rustc::infer::{self, InferOk, SuppressRegionErrors};
use rustc::middle::region;
use rustc::traits::{ObligationCause, TraitEngine, TraitEngineExt};
use rustc::ty::subst::{Subst, Substs, UnpackedKind};
use rustc::ty::{self, Ty, TyCtxt};
-use util::common::ErrorReported;
+use crate::util::common::ErrorReported;
use syntax::ast;
use syntax_pos::Span;
// absent. So we report an error that the Drop impl injected a
// predicate that is not present on the struct definition.
- let self_type_node_id = tcx.hir().as_local_node_id(self_type_did).unwrap();
+ let self_type_hir_id = tcx.hir().as_local_hir_id(self_type_did).unwrap();
let drop_impl_span = tcx.def_span(drop_impl_did);
// repeated `contains` calls.
if !assumptions_in_impl_context.contains(&predicate) {
- let item_span = tcx.hir().span(self_type_node_id);
+ let item_span = tcx.hir().span_by_hir_id(self_type_hir_id);
struct_span_err!(
tcx.sess,
drop_impl_span,
use rustc_data_structures::sync::Lrc;
use syntax_pos::Span;
use super::FnCtxt;
-use util::nodemap::FxHashMap;
+use crate::util::nodemap::FxHashMap;
struct InteriorVisitor<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
fcx: &'a FnCtxt<'a, 'gcx, 'tcx>,
use rustc::traits::{ObligationCause, ObligationCauseCode};
use rustc::ty::{self, TyCtxt, Ty};
use rustc::ty::subst::Subst;
-use require_same_types;
+use crate::require_same_types;
use rustc_target::spec::abi::Abi;
use syntax::symbol::Symbol;
use super::{probe, MethodCallee};
-use astconv::AstConv;
-use check::{FnCtxt, PlaceOp, callee, Needs};
-use hir::GenericArg;
-use hir::def_id::DefId;
+use crate::astconv::AstConv;
+use crate::check::{FnCtxt, PlaceOp, callee, Needs};
+use crate::hir::GenericArg;
+use crate::hir::def_id::DefId;
use rustc::ty::subst::Substs;
use rustc::traits;
use rustc::ty::{self, Ty, GenericParamDefKind};
pub use self::CandidateSource::*;
pub use self::suggest::{SelfSource, TraitInfo};
-use check::FnCtxt;
+use crate::check::FnCtxt;
+use crate::namespace::Namespace;
use errors::{Applicability, DiagnosticBuilder};
-use namespace::Namespace;
use rustc_data_structures::sync::Lrc;
use rustc::hir;
use rustc::hir::def::Def;
use crate::{check_type_alias_enum_variants_enabled};
use self::probe::{IsSuggestion, ProbeScope};
-pub fn provide(providers: &mut ty::query::Providers) {
+pub fn provide(providers: &mut ty::query::Providers<'_>) {
suggest::provide(providers);
probe::provide(providers);
}
use super::{CandidateSource, ImplSource, TraitSource};
use super::suggest;
-use check::autoderef::{self, Autoderef};
-use check::FnCtxt;
-use hir::def_id::DefId;
-use hir::def::Def;
-use namespace::Namespace;
+use crate::check::autoderef::{self, Autoderef};
+use crate::check::FnCtxt;
+use crate::hir::def_id::DefId;
+use crate::hir::def::Def;
+use crate::namespace::Namespace;
use rustc_data_structures::sync::Lrc;
use rustc::hir;
//! Give useful errors and suggestions to users when an item can't be
//! found or is otherwise invalid.
-use check::FnCtxt;
+use crate::check::FnCtxt;
+use crate::middle::lang_items::FnOnceTraitLangItem;
+use crate::namespace::Namespace;
+use crate::util::nodemap::FxHashSet;
use errors::{Applicability, DiagnosticBuilder};
-use middle::lang_items::FnOnceTraitLangItem;
-use namespace::Namespace;
use rustc_data_structures::sync::Lrc;
use rustc::hir::{self, ExprKind, Node, QPath};
use rustc::hir::def::Def;
use rustc::traits::Obligation;
use rustc::ty::{self, Adt, Ty, TyCtxt, ToPolyTraitRef, ToPredicate, TypeFoldable};
use rustc::ty::item_path::with_crate_prefix;
-use util::nodemap::FxHashSet;
use syntax_pos::{Span, FileName};
use syntax::ast;
use syntax::util::lev_distance::find_best_match_for_name;
pub mod intrinsic;
mod op;
-use astconv::{AstConv, PathSeg};
+use crate::astconv::{AstConv, PathSeg};
use errors::{Applicability, DiagnosticBuilder, DiagnosticId};
use rustc::hir::{self, ExprKind, GenericArg, ItemKind, Node, PatKind, QPath};
use rustc::hir::def::{CtorKind, Def};
use rustc::hir::def_id::{CrateNum, DefId, LOCAL_CRATE};
use rustc::hir::intravisit::{self, Visitor, NestedVisitorMap};
use rustc::hir::itemlikevisit::ItemLikeVisitor;
-use middle::lang_items;
-use namespace::Namespace;
+use crate::middle::lang_items;
+use crate::namespace::Namespace;
use rustc::infer::{self, InferCtxt, InferOk, InferResult, RegionVariableOrigin};
use rustc::infer::canonical::{Canonical, OriginalQueryValues, QueryResponse};
use rustc_data_structures::indexed_vec::Idx;
use std::ops::{self, Deref};
use std::slice;
-use require_c_abi_if_variadic;
-use session::{CompileIncomplete, Session};
-use session::config::EntryFnType;
-use TypeAndSubsts;
-use lint;
-use util::captures::Captures;
-use util::common::{ErrorReported, indenter};
-use util::nodemap::{DefIdMap, DefIdSet, FxHashMap, FxHashSet, NodeMap};
+use crate::require_c_abi_if_variadic;
+use crate::session::{CompileIncomplete, Session};
+use crate::session::config::EntryFnType;
+use crate::TypeAndSubsts;
+use crate::lint;
+use crate::util::captures::Captures;
+use crate::util::common::{ErrorReported, indenter};
+use crate::util::nodemap::{DefIdMap, DefIdSet, FxHashMap, FxHashSet, NodeMap};
pub use self::Expectation::*;
use self::autoderef::Autoderef;
// Check for duplicate discriminant values
if let Some(i) = disr_vals.iter().position(|&x| x.val == discr.val) {
let variant_did = def.variants[VariantIdx::new(i)].did;
- let variant_i_node_id = tcx.hir().as_local_node_id(variant_did).unwrap();
- let variant_i = tcx.hir().expect_variant(variant_i_node_id);
+ let variant_i_hir_id = tcx.hir().as_local_hir_id(variant_did).unwrap();
+ let variant_i = tcx.hir().expect_variant(variant_i_hir_id);
let i_span = match variant_i.node.disr_expr {
- Some(ref expr) => tcx.hir().span(expr.id),
- None => tcx.hir().span(variant_i_node_id)
+ Some(ref expr) => tcx.hir().span_by_hir_id(expr.hir_id),
+ None => tcx.hir().span_by_hir_id(variant_i_hir_id)
};
let span = match v.node.disr_expr {
- Some(ref expr) => tcx.hir().span(expr.id),
+ Some(ref expr) => tcx.hir().span_by_hir_id(expr.hir_id),
None => v.span
};
struct_span_err!(tcx.sess, span, E0081,
// arguments which we skipped above.
if variadic {
fn variadic_error<'tcx>(s: &Session, span: Span, t: Ty<'tcx>, cast_ty: &str) {
- use structured_errors::{VariadicError, StructuredDiagnostic};
+ use crate::structured_errors::{VariadicError, StructuredDiagnostic};
VariadicError::new(s, span, t, cast_ty).diagnostic().emit();
}
display
}
- fn no_such_field_err<T: Display>(&self, span: Span, field: T, expr_t: &ty::TyS)
- -> DiagnosticBuilder {
+ fn no_such_field_err<T: Display>(&self, span: Span, field: T, expr_t: &ty::TyS<'_>)
+ -> DiagnosticBuilder<'_> {
type_error_struct!(self.tcx().sess, span, expr_t, E0609,
"no field `{}` on type `{}`",
field, expr_t)
&self,
blk: &'gcx hir::Block,
expected_ty: Ty<'tcx>,
- err: &mut DiagnosticBuilder,
+ err: &mut DiagnosticBuilder<'_>,
) {
if let Some(span_semi) = self.could_remove_semicolon(blk, expected_ty) {
err.span_suggestion(
});
for (&used, param) in types_used.iter().zip(types) {
if !used {
- let id = tcx.hir().as_local_node_id(param.def_id).unwrap();
- let span = tcx.hir().span(id);
+ let id = tcx.hir().as_local_hir_id(param.def_id).unwrap();
+ let span = tcx.hir().span_by_hir_id(id);
struct_span_err!(tcx.sess, span, E0091, "type parameter `{}` is unused", param.name)
.span_label(span, "unused type parameter")
.emit();
);
handler.note_without_error(&format!("rustc {} running on {}",
option_env!("CFG_VERSION").unwrap_or("unknown_version"),
- ::session::config::host_triple(),
+ crate::session::config::host_triple(),
));
}
//! relation, except that a borrowed pointer never owns its
//! contents.
-use check::dropck;
-use check::FnCtxt;
-use middle::mem_categorization as mc;
-use middle::mem_categorization::Categorization;
-use middle::region;
+use crate::check::dropck;
+use crate::check::FnCtxt;
+use crate::middle::mem_categorization as mc;
+use crate::middle::mem_categorization::Categorization;
+use crate::middle::region;
use rustc::hir::def_id::DefId;
use rustc::infer::outlives::env::OutlivesEnvironment;
use rustc::infer::{self, RegionObligation, SuppressRegionErrors};
use super::FnCtxt;
-use middle::expr_use_visitor as euv;
-use middle::mem_categorization as mc;
-use middle::mem_categorization::Categorization;
+use crate::middle::expr_use_visitor as euv;
+use crate::middle::mem_categorization as mc;
+use crate::middle::mem_categorization::Categorization;
use rustc::hir;
use rustc::hir::def_id::DefId;
use rustc::hir::def_id::LocalDefId;
}
fn var_name(tcx: TyCtxt, var_hir_id: hir::HirId) -> ast::Name {
- let var_node_id = tcx.hir().hir_to_node_id(var_hir_id);
- tcx.hir().name(var_node_id)
+ tcx.hir().name_by_hir_id(var_hir_id)
}
-use check::{Inherited, FnCtxt};
-use constrained_type_params::{identify_constrained_type_params, Parameter};
+use crate::check::{Inherited, FnCtxt};
+use crate::constrained_type_params::{identify_constrained_type_params, Parameter};
-use hir::def_id::DefId;
+use crate::hir::def_id::DefId;
use rustc::traits::{self, ObligationCauseCode};
use rustc::ty::{self, Lift, Ty, TyCtxt, TyKind, GenericParamDefKind, TypeFoldable, ToPredicate};
use rustc::ty::subst::{Subst, Substs};
/// not included it frequently leads to confusing errors in fn bodies. So it's better to check
/// the types first.
pub fn check_item_well_formed<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) {
- let node_id = tcx.hir().as_local_node_id(def_id).unwrap();
- let item = tcx.hir().expect_item(node_id);
+ let hir_id = tcx.hir().as_local_hir_id(def_id).unwrap();
+ let item = tcx.hir().expect_item_by_hir_id(hir_id);
- debug!("check_item_well_formed(it.id={}, it.name={})",
- item.id,
+ debug!("check_item_well_formed(it.hir_id={:?}, it.name={})",
+ item.hir_id,
tcx.item_path_str(def_id));
match item.node {
// won't be allowed unless there's an *explicit* implementation of `Send`
// for `T`
hir::ItemKind::Impl(_, polarity, defaultness, _, ref trait_ref, ref self_ty, _) => {
- let is_auto = tcx.impl_trait_ref(tcx.hir().local_def_id(item.id))
+ let is_auto = tcx.impl_trait_ref(tcx.hir().local_def_id_from_hir_id(item.hir_id))
.map_or(false, |trait_ref| tcx.trait_is_auto(trait_ref.def_id));
if let (hir::Defaultness::Default { .. }, true) = (defaultness, is_auto) {
tcx.sess.span_err(item.span, "impls of auto traits cannot be default");
// unresolved type variables and replaces "ty_var" types with their
// substitutions.
-use check::FnCtxt;
+use crate::check::FnCtxt;
use errors::DiagnosticBuilder;
use rustc::hir;
use rustc::hir::def_id::{DefId, DefIndex};
if let ty::UserType::TypeOf(_, user_substs) = c_ty.value {
if self.rustc_dump_user_substs {
// This is a unit-testing mechanism.
- let node_id = self.tcx().hir().hir_to_node_id(hir_id);
- let span = self.tcx().hir().span(node_id);
+ let span = self.tcx().hir().span_by_hir_id(hir_id);
// We need to buffer the errors in order to guarantee a consistent
// order when emitting them.
let err = self.tcx().sess.struct_span_err(
impl Locatable for DefIndex {
fn to_span(&self, tcx: &TyCtxt) -> Span {
- let node_id = tcx.hir().def_index_to_node_id(*self);
- tcx.hir().span(node_id)
+ let hir_id = tcx.hir().def_index_to_hir_id(*self);
+ tcx.hir().span_by_hir_id(hir_id)
}
}
impl Locatable for hir::HirId {
fn to_span(&self, tcx: &TyCtxt) -> Span {
- let node_id = tcx.hir().hir_to_node_id(*self);
- tcx.hir().span(node_id)
+ tcx.hir().span_by_hir_id(*self)
}
}
-use lint;
+use crate::lint;
use rustc::ty::TyCtxt;
use errors::Applicability;
fn visit_implementation_of_copy<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, impl_did: DefId) {
debug!("visit_implementation_of_copy: impl_did={:?}", impl_did);
- let impl_node_id = if let Some(n) = tcx.hir().as_local_node_id(impl_did) {
+ let impl_hir_id = if let Some(n) = tcx.hir().as_local_hir_id(impl_did) {
n
} else {
debug!("visit_implementation_of_copy(): impl not in this crate");
debug!("visit_implementation_of_copy: self_type={:?} (bound)",
self_type);
- let span = tcx.hir().span(impl_node_id);
+ let span = tcx.hir().span_by_hir_id(impl_hir_id);
let param_env = tcx.param_env(impl_did);
assert!(!self_type.has_escaping_bound_vars());
match param_env.can_type_implement_copy(tcx, self_type) {
Ok(()) => {}
Err(CopyImplementationError::InfrigingFields(fields)) => {
- let item = tcx.hir().expect_item(impl_node_id);
+ let item = tcx.hir().expect_item_by_hir_id(impl_hir_id);
let span = if let ItemKind::Impl(.., Some(ref tr), _, _) = item.node {
tr.path.span
} else {
err.emit()
}
Err(CopyImplementationError::NotAnAdt) => {
- let item = tcx.hir().expect_item(impl_node_id);
+ let item = tcx.hir().expect_item_by_hir_id(impl_hir_id);
let span = if let ItemKind::Impl(.., ref ty, _) = item.node {
ty.span
} else {
-use namespace::Namespace;
+use crate::namespace::Namespace;
use rustc::hir::def_id::{CrateNum, DefId, LOCAL_CRATE};
use rustc::hir;
use rustc::hir::itemlikevisit::ItemLikeVisitor;
use rustc::traits::{self, IntercrateMode};
use rustc::ty::TyCtxt;
-use lint;
+use crate::lint;
pub fn crate_inherent_impls_overlap_check<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
crate_num: CrateNum) {
// done by the orphan and overlap modules. Then we build up various
// mappings. That mapping code resides here.
-use hir::def_id::{DefId, LOCAL_CRATE};
+use crate::hir::def_id::{DefId, LOCAL_CRATE};
use rustc::traits;
use rustc::ty::{self, TyCtxt, TypeFoldable};
use rustc::ty::query::Providers;
//! At present, however, we do run collection across all items in the
//! crate as a kind of pass. This should eventually be factored away.
-use astconv::{AstConv, Bounds};
-use constrained_type_params as ctp;
-use check::intrinsic::intrisic_operation_unsafety;
-use lint;
-use middle::lang_items::SizedTraitLangItem;
-use middle::resolve_lifetime as rl;
-use middle::weak_lang_items;
+use crate::astconv::{AstConv, Bounds};
+use crate::constrained_type_params as ctp;
+use crate::check::intrinsic::intrisic_operation_unsafety;
+use crate::lint;
+use crate::middle::lang_items::SizedTraitLangItem;
+use crate::middle::resolve_lifetime as rl;
+use crate::middle::weak_lang_items;
use rustc::mir::mono::Linkage;
use rustc::ty::query::Providers;
use rustc::ty::subst::Substs;
);
}
-pub fn provide(providers: &mut Providers) {
+pub fn provide(providers: &mut Providers<'_>) {
*providers = Providers {
type_of,
generics_of,
}
fn trait_def<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> &'tcx ty::TraitDef {
- let node_id = tcx.hir().as_local_node_id(def_id).unwrap();
- let item = tcx.hir().expect_item(node_id);
+ let hir_id = tcx.hir().as_local_hir_id(def_id).unwrap();
+ let item = tcx.hir().expect_item_by_hir_id(hir_id);
let (is_auto, unsafety) = match item.node {
hir::ItemKind::Trait(is_auto, unsafety, ..) => (is_auto == hir::IsAuto::Yes, unsafety),
) -> Option<ty::TraitRef<'tcx>> {
let icx = ItemCtxt::new(tcx, def_id);
- let node_id = tcx.hir().as_local_node_id(def_id).unwrap();
- match tcx.hir().expect_item(node_id).node {
+ let hir_id = tcx.hir().as_local_hir_id(def_id).unwrap();
+ match tcx.hir().expect_item_by_hir_id(hir_id).node {
hir::ItemKind::Impl(.., ref opt_trait_ref, _, _) => {
opt_trait_ref.as_ref().map(|ast_trait_ref| {
let selfty = tcx.type_of(def_id);
}
fn impl_polarity<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> hir::ImplPolarity {
- let node_id = tcx.hir().as_local_node_id(def_id).unwrap();
- match tcx.hir().expect_item(node_id).node {
+ let hir_id = tcx.hir().as_local_hir_id(def_id).unwrap();
+ match tcx.hir().expect_item_by_hir_id(hir_id).node {
hir::ItemKind::Impl(_, polarity, ..) => polarity,
ref item => bug!("impl_polarity: {:?} not an impl", item),
}
/// which is determined by 1, which requires `U`, that is determined
/// by 0. I should probably pick a less tangled example, but I can't
/// think of any.
-pub fn setup_constraining_predicates<'tcx>(tcx: TyCtxt,
+pub fn setup_constraining_predicates<'tcx>(tcx: TyCtxt<'_, '_, '_>,
predicates: &mut [(ty::Predicate<'tcx>, Span)],
impl_trait_ref: Option<ty::TraitRef<'tcx>>,
input_parameters: &mut FxHashSet<Parameter>)
//! specialization errors. These things can (and probably should) be
//! fixed, but for the moment it's easier to do these checks early.
-use constrained_type_params as ctp;
+use crate::constrained_type_params as ctp;
use rustc::hir;
use rustc::hir::itemlikevisit::ItemLikeVisitor;
use rustc::hir::def_id::DefId;
// used elsewhere are not projected back out.
}
-fn report_unused_parameter(tcx: TyCtxt,
+fn report_unused_parameter(tcx: TyCtxt<'_, '_, '_>,
span: Span,
kind: &str,
name: &str)
#![recursion_limit="256"]
+#![deny(rust_2018_idioms)]
+#![allow(explicit_outlives_requirements)]
+
+#![allow(elided_lifetimes_in_paths)] // WIP
+
#[macro_use] extern crate log;
#[macro_use] extern crate syntax;
-extern crate syntax_pos;
-
-extern crate arena;
#[macro_use] extern crate rustc;
-extern crate rustc_data_structures;
-extern crate rustc_errors as errors;
-extern crate rustc_target;
-extern crate smallvec;
// N.B., this module needs to be declared first so diagnostics are
// registered before they are used.
}
}
-fn require_c_abi_if_variadic(tcx: TyCtxt,
+fn require_c_abi_if_variadic(tcx: TyCtxt<'_, '_, '_>,
decl: &hir::FnDecl,
abi: Abi,
span: Span) {
}
}
-pub fn provide(providers: &mut Providers) {
+pub fn provide(providers: &mut Providers<'_>) {
collect::provide(providers);
coherence::provide(providers);
check::provide(providers);
use rustc::hir::def_id::DefId;
use rustc::ty::{self, OutlivesPredicate, TyCtxt};
-use util::nodemap::FxHashMap;
+use crate::util::nodemap::FxHashMap;
use super::utils::*;
-use rustc::hir;
-use hir::Node;
+use rustc::hir::{self, Node};
use rustc::hir::def_id::DefId;
use rustc::hir::itemlikevisit::ItemLikeVisitor;
use rustc::ty::subst::{Kind, Subst, UnpackedKind};
pub mod test;
mod utils;
-pub fn provide(providers: &mut Providers) {
+pub fn provide(providers: &mut Providers<'_>) {
*providers = Providers {
inferred_outlives_of,
inferred_outlives_crate,
fn variances_of<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, item_def_id: DefId)
-> Lrc<Vec<ty::Variance>> {
- let id = tcx.hir().as_local_node_id(item_def_id).expect("expected local def-id");
+ let id = tcx.hir().as_local_hir_id(item_def_id).expect("expected local def-id");
let unsupported = || {
// Variance not relevant.
- span_bug!(tcx.hir().span(id), "asked to compute variance for wrong kind of item")
+ span_bug!(tcx.hir().span_by_hir_id(id), "asked to compute variance for wrong kind of item")
};
- match tcx.hir().get(id) {
+ match tcx.hir().get_by_hir_id(id) {
Node::Item(item) => match item.node {
hir::ItemKind::Enum(..) |
hir::ItemKind::Struct(..) |
use syntax::ast;
use rustc::hir;
use rustc::hir::itemlikevisit::ItemLikeVisitor;
-use util::nodemap::NodeMap;
+use crate::util::nodemap::NodeMap;
use self::VarianceTerm::*;
StaticItem(Static),
ConstantItem(Constant),
TraitItem(Trait),
+ TraitAliasItem(TraitAlias),
ImplItem(Impl),
/// A method signature only. Used for required methods in traits (ie,
/// non-default-methods).
ItemEnum::TyMethodItem(ref i) => &i.generics,
ItemEnum::MethodItem(ref i) => &i.generics,
ItemEnum::ForeignFunctionItem(ref f) => &f.generics,
+ ItemEnum::TraitAliasItem(ref ta) => &ta.generics,
_ => return None,
})
}
items.extend(self.impls.iter().flat_map(|x| x.clean(cx)));
items.extend(self.macros.iter().map(|x| x.clean(cx)));
items.extend(self.proc_macros.iter().map(|x| x.clean(cx)));
+ items.extend(self.trait_aliases.iter().map(|x| x.clean(cx)));
// determine if we should display the inner contents or
// the outer `mod` item for the source code.
pub fn self_type(&self) -> Option<SelfTy> {
self.inputs.values.get(0).and_then(|v| v.to_self())
}
+
+ /// Returns the sugared return type for an async function.
+ ///
+ /// For example, if the return type is `impl std::future::Future<Output = i32>`, this function
+ /// will return `i32`.
+ ///
+ /// # Panics
+ ///
+ /// This function will panic if the return type does not match the expected sugaring for async
+ /// functions.
+ pub fn sugared_async_return_type(&self) -> FunctionRetTy {
+ match &self.output {
+ FunctionRetTy::Return(Type::ImplTrait(bounds)) => {
+ match &bounds[0] {
+ GenericBound::TraitBound(PolyTrait { trait_, .. }, ..) => {
+ let bindings = trait_.bindings().unwrap();
+ FunctionRetTy::Return(bindings[0].ty.clone())
+ }
+ _ => panic!("unexpected desugaring of async function"),
+ }
+ }
+ _ => panic!("unexpected desugaring of async function"),
+ }
+ }
}
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)]
items: self.items.clean(cx),
generics: self.generics.clean(cx),
bounds: self.bounds.clean(cx),
- is_spotlight: is_spotlight,
+ is_spotlight,
is_auto: self.is_auto.clean(cx),
}),
}
}
}
+#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
+pub struct TraitAlias {
+ pub generics: Generics,
+ pub bounds: Vec<GenericBound>,
+}
+
+impl Clean<Item> for doctree::TraitAlias {
+ fn clean(&self, cx: &DocContext) -> Item {
+ let attrs = self.attrs.clean(cx);
+ Item {
+ name: Some(self.name.clean(cx)),
+ attrs,
+ source: self.whence.clean(cx),
+ def_id: cx.tcx.hir().local_def_id(self.id),
+ visibility: self.vis.clean(cx),
+ stability: self.stab.clean(cx),
+ deprecation: self.depr.clean(cx),
+ inner: TraitAliasItem(TraitAlias {
+ generics: self.generics.clean(cx),
+ bounds: self.bounds.clean(cx),
+ }),
+ }
+ }
+}
+
impl Clean<bool> for hir::IsAuto {
fn clean(&self, _: &DocContext) -> bool {
match *self {
Macro,
Attr,
Derive,
+ TraitAlias,
}
pub trait GetDefId {
_ => None,
}
}
+
+ pub fn bindings(&self) -> Option<&[TypeBinding]> {
+ match *self {
+ ResolvedPath { ref path, .. } => {
+ path.segments.last().and_then(|seg| {
+ if let GenericArgs::AngleBracketed { ref bindings, .. } = seg.args {
+ Some(&**bindings)
+ } else {
+ None
+ }
+ })
+ }
+ _ => None
+ }
+ }
}
impl GetDefId for Type {
MacroKind::Derive => (i, TypeKind::Derive),
MacroKind::ProcMacroStub => unreachable!(),
},
+ Def::TraitAlias(i) => (i, TypeKind::TraitAlias),
Def::SelfTy(Some(def_id), _) => (def_id, TypeKind::Trait),
- Def::SelfTy(_, Some(impl_def_id)) => {
- return impl_def_id
- }
+ Def::SelfTy(_, Some(impl_def_id)) => return impl_def_id,
_ => return def.def_id()
};
if did.is_local() { return did }
pub foreigns: Vec<hir::ForeignMod>,
pub macros: Vec<Macro>,
pub proc_macros: Vec<ProcMacro>,
+ pub trait_aliases: Vec<TraitAlias>,
pub is_crate: bool,
}
where_inner: syntax_pos::DUMMY_SP,
attrs : hir::HirVec::new(),
extern_crates: Vec::new(),
- imports : Vec::new(),
- structs : Vec::new(),
- unions : Vec::new(),
- enums : Vec::new(),
- fns : Vec::new(),
- mods : Vec::new(),
- typedefs : Vec::new(),
- existentials: Vec::new(),
- statics : Vec::new(),
- constants : Vec::new(),
- traits : Vec::new(),
- impls : Vec::new(),
- foreigns : Vec::new(),
- macros : Vec::new(),
- proc_macros: Vec::new(),
+ imports : Vec::new(),
+ structs : Vec::new(),
+ unions : Vec::new(),
+ enums : Vec::new(),
+ fns : Vec::new(),
+ mods : Vec::new(),
+ typedefs : Vec::new(),
+ existentials: Vec::new(),
+ statics : Vec::new(),
+ constants : Vec::new(),
+ traits : Vec::new(),
+ impls : Vec::new(),
+ foreigns : Vec::new(),
+ macros : Vec::new(),
+ proc_macros: Vec::new(),
+ trait_aliases: Vec::new(),
is_crate : false,
}
}
pub depr: Option<attr::Deprecation>,
}
+pub struct TraitAlias {
+ pub name: Name,
+ pub generics: hir::Generics,
+ pub bounds: hir::HirVec<hir::GenericBound>,
+ pub attrs: hir::HirVec<ast::Attribute>,
+ pub id: ast::NodeId,
+ pub whence: Span,
+ pub vis: hir::Visibility,
+ pub stab: Option<attr::Stability>,
+ pub depr: Option<attr::Deprecation>,
+}
+
#[derive(Debug)]
pub struct Impl {
pub unsafety: hir::Unsafety,
//! assume that HTML output is desired, although it may be possible to redesign
//! them in the future to instead emit any format desired.
+use std::borrow::Cow;
use std::fmt;
use rustc::hir::def_id::DefId;
pub struct CommaSep<'a, T: 'a>(pub &'a [T]);
pub struct AbiSpace(pub Abi);
-/// Wrapper struct for properly emitting a method declaration.
-pub struct Method<'a> {
+/// Wrapper struct for properly emitting a function or method declaration.
+pub struct Function<'a> {
/// The declaration to emit.
pub decl: &'a clean::FnDecl,
/// The length of the function's "name", used to determine line-wrapping.
pub name_len: usize,
/// The number of spaces to indent each successive line with, if line-wrapping is necessary.
pub indent: usize,
+ /// Whether the function is async or not.
+ pub asyncness: hir::IsAsync,
}
/// Wrapper struct for emitting a where-clause from Generics.
}
}
-impl<'a> fmt::Display for Method<'a> {
+impl<'a> fmt::Display for Function<'a> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- let &Method { decl, name_len, indent } = self;
+ let &Function { decl, name_len, indent, asyncness } = self;
let amp = if f.alternate() { "&" } else { "&" };
let mut args = String::new();
let mut args_plain = String::new();
args_plain.push_str(", ...");
}
- let arrow_plain = format!("{:#}", decl.output);
+ let output = if let hir::IsAsync::Async = asyncness {
+ Cow::Owned(decl.sugared_async_return_type())
+ } else {
+ Cow::Borrowed(&decl.output)
+ };
+
+ let arrow_plain = format!("{:#}", &output);
let arrow = if f.alternate() {
- format!("{:#}", decl.output)
+ format!("{:#}", &output)
} else {
- decl.output.to_string()
+ output.to_string()
};
let pad = " ".repeat(name_len);
Existential = 22,
ProcAttribute = 23,
ProcDerive = 24,
+ TraitAlias = 25,
}
clean::AssociatedTypeItem(..) => ItemType::AssociatedType,
clean::ForeignTypeItem => ItemType::ForeignType,
clean::KeywordItem(..) => ItemType::Keyword,
+ clean::TraitAliasItem(..) => ItemType::TraitAlias,
clean::ProcMacroItem(ref mac) => match mac.kind {
MacroKind::Bang => ItemType::Macro,
MacroKind::Attr => ItemType::ProcAttribute,
impl From<clean::TypeKind> for ItemType {
fn from(kind: clean::TypeKind) -> ItemType {
match kind {
- clean::TypeKind::Struct => ItemType::Struct,
- clean::TypeKind::Union => ItemType::Union,
- clean::TypeKind::Enum => ItemType::Enum,
- clean::TypeKind::Function => ItemType::Function,
- clean::TypeKind::Trait => ItemType::Trait,
- clean::TypeKind::Module => ItemType::Module,
- clean::TypeKind::Static => ItemType::Static,
- clean::TypeKind::Const => ItemType::Constant,
- clean::TypeKind::Variant => ItemType::Variant,
- clean::TypeKind::Typedef => ItemType::Typedef,
- clean::TypeKind::Foreign => ItemType::ForeignType,
- clean::TypeKind::Macro => ItemType::Macro,
- clean::TypeKind::Attr => ItemType::ProcAttribute,
- clean::TypeKind::Derive => ItemType::ProcDerive,
+ clean::TypeKind::Struct => ItemType::Struct,
+ clean::TypeKind::Union => ItemType::Union,
+ clean::TypeKind::Enum => ItemType::Enum,
+ clean::TypeKind::Function => ItemType::Function,
+ clean::TypeKind::Trait => ItemType::Trait,
+ clean::TypeKind::Module => ItemType::Module,
+ clean::TypeKind::Static => ItemType::Static,
+ clean::TypeKind::Const => ItemType::Constant,
+ clean::TypeKind::Variant => ItemType::Variant,
+ clean::TypeKind::Typedef => ItemType::Typedef,
+ clean::TypeKind::Foreign => ItemType::ForeignType,
+ clean::TypeKind::Macro => ItemType::Macro,
+ clean::TypeKind::Attr => ItemType::ProcAttribute,
+ clean::TypeKind::Derive => ItemType::ProcDerive,
+ clean::TypeKind::TraitAlias => ItemType::TraitAlias,
}
}
}
ItemType::Existential => "existential",
ItemType::ProcAttribute => "attr",
ItemType::ProcDerive => "derive",
+ ItemType::TraitAlias => "traitalias",
}
}
ItemType::Primitive |
ItemType::AssociatedType |
ItemType::Existential |
+ ItemType::TraitAlias |
ItemType::ForeignType => NameSpace::Type,
ItemType::ExternCrate |
use html::escape::Escape;
use html::format::{AsyncSpace, ConstnessSpace};
use html::format::{GenericBounds, WhereClause, href, AbiSpace};
-use html::format::{VisSpace, Method, UnsafetySpace, MutableSpace};
+use html::format::{VisSpace, Function, UnsafetySpace, MutableSpace};
use html::format::fmt_impl_for_trait_page;
use html::item_type::ItemType;
use html::markdown::{self, Markdown, MarkdownHtml, MarkdownSummaryLine, ErrorCodes, IdMap};
keywords: FxHashSet<ItemEntry>,
attributes: FxHashSet<ItemEntry>,
derives: FxHashSet<ItemEntry>,
+ trait_aliases: FxHashSet<ItemEntry>,
}
impl AllTypes {
keywords: new_set(100),
attributes: new_set(100),
derives: new_set(100),
+ trait_aliases: new_set(100),
}
}
ItemType::Constant => self.constants.insert(ItemEntry::new(new_url, name)),
ItemType::ProcAttribute => self.attributes.insert(ItemEntry::new(new_url, name)),
ItemType::ProcDerive => self.derives.insert(ItemEntry::new(new_url, name)),
+ ItemType::TraitAlias => self.trait_aliases.insert(ItemEntry::new(new_url, name)),
_ => true,
};
}
print_entries(f, &self.derives, "Derive Macros", "derives")?;
print_entries(f, &self.functions, "Functions", "functions")?;
print_entries(f, &self.typedefs, "Typedefs", "typedefs")?;
+ print_entries(f, &self.trait_aliases, "Trait Aliases", "trait-aliases")?;
print_entries(f, &self.existentials, "Existentials", "existentials")?;
print_entries(f, &self.statics, "Statics", "statics")?;
print_entries(f, &self.constants, "Constants", "constants")
clean::ForeignTypeItem => write!(fmt, "Foreign Type ")?,
clean::KeywordItem(..) => write!(fmt, "Keyword ")?,
clean::ExistentialItem(..) => write!(fmt, "Existential Type ")?,
+ clean::TraitAliasItem(..) => write!(fmt, "Trait Alias ")?,
_ => {
// We don't generate pages for any other type.
unreachable!();
clean::ForeignTypeItem => item_foreign_type(fmt, self.cx, self.item),
clean::KeywordItem(ref k) => item_keyword(fmt, self.cx, self.item, k),
clean::ExistentialItem(ref e, _) => item_existential(fmt, self.cx, self.item, e),
+ clean::TraitAliasItem(ref ta) => item_trait_alias(fmt, self.cx, self.item, ta),
_ => {
// We don't generate pages for any other type.
unreachable!();
name = it.name.as_ref().unwrap(),
generics = f.generics,
where_clause = WhereClause { gens: &f.generics, indent: 0, end_newline: true },
- decl = Method {
+ decl = Function {
decl: &f.decl,
name_len,
indent: 0,
+ asyncness: f.header.asyncness,
})?;
document(w, cx, it)
}
Ok(())
}
-fn bounds(t_bounds: &[clean::GenericBound]) -> String {
+fn bounds(t_bounds: &[clean::GenericBound], trait_alias: bool) -> String {
let mut bounds = String::new();
- let mut bounds_plain = String::new();
if !t_bounds.is_empty() {
- if !bounds.is_empty() {
- bounds.push(' ');
- bounds_plain.push(' ');
+ if !trait_alias {
+ bounds.push_str(": ");
}
- bounds.push_str(": ");
- bounds_plain.push_str(": ");
for (i, p) in t_bounds.iter().enumerate() {
if i > 0 {
bounds.push_str(" + ");
- bounds_plain.push_str(" + ");
}
bounds.push_str(&(*p).to_string());
- bounds_plain.push_str(&format!("{:#}", *p));
}
}
bounds
it: &clean::Item,
t: &clean::Trait,
) -> fmt::Result {
- let bounds = bounds(&t.bounds);
+ let bounds = bounds(&t.bounds, false);
let types = t.items.iter().filter(|m| m.is_associated_type()).collect::<Vec<_>>();
let consts = t.items.iter().filter(|m| m.is_associated_const()).collect::<Vec<_>>();
let required = t.items.iter().filter(|m| m.is_ty_method()).collect::<Vec<_>>();
href = href,
name = name,
generics = *g,
- decl = Method {
+ decl = Function {
decl: d,
name_len: head_len,
indent,
+ asyncness: header.asyncness,
},
where_clause = WhereClause {
gens: g,
it.name.as_ref().unwrap(),
t.generics,
where_clause = WhereClause { gens: &t.generics, indent: 0, end_newline: true },
- bounds = bounds(&t.bounds))?;
+ bounds = bounds(&t.bounds, false))?;
+
+ document(w, cx, it)?;
+
+ // Render any items associated directly to this alias, as otherwise they
+ // won't be visible anywhere in the docs. It would be nice to also show
+ // associated items from the aliased type (see discussion in #32077), but
+ // we need #14072 to make sense of the generics.
+ render_assoc_items(w, cx, it, it.def_id, AssocItemRender::All)
+}
+
+fn item_trait_alias(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item,
+ t: &clean::TraitAlias) -> fmt::Result {
+ write!(w, "<pre class='rust trait-alias'>")?;
+ render_attributes(w, it)?;
+ write!(w, "trait {}{}{} = {};</pre>",
+ it.name.as_ref().unwrap(),
+ t.generics,
+ WhereClause { gens: &t.generics, indent: 0, end_newline: true },
+ bounds(&t.bounds, true))?;
document(w, cx, it)?;
ItemType::Existential => ("existentials", "Existentials"),
ItemType::ProcAttribute => ("attributes", "Attribute Macros"),
ItemType::ProcDerive => ("derives", "Derive Macros"),
+ ItemType::TraitAlias => ("trait-aliases", "Trait aliases"),
}
}
"keyword",
"existential",
"attr",
- "derive"];
+ "derive",
+ "traitalias"];
var search_input = document.getElementsByClassName("search-input")[0];
// 2 for "In Return Types"
var currentTab = 0;
- var themesWidth = null;
-
var titleBeforeSearch = document.title;
function getPageId() {
return String.fromCharCode(c);
}
- function displayHelp(display, ev) {
+ function displayHelp(display, ev, help) {
if (display === true) {
if (hasClass(help, "hidden")) {
ev.preventDefault();
hideModal();
var search = document.getElementById("search");
if (hasClass(help, "hidden") === false) {
- displayHelp(false, ev);
+ displayHelp(false, ev, help);
} else if (hasClass(search, "hidden") === false) {
ev.preventDefault();
addClass(search, "hidden");
case "s":
case "S":
- displayHelp(false, ev);
+ displayHelp(false, ev, help);
hideModal();
ev.preventDefault();
focusSearchBar();
case "?":
if (ev.shiftKey) {
hideModal();
- displayHelp(true, ev);
+ displayHelp(true, ev, help);
}
break;
}
return MAX_LEV_DISTANCE + 1;
}
}
- return lev_distance;//Math.ceil(total / done);
+ return Math.ceil(total / done);
}
}
return MAX_LEV_DISTANCE + 1;
block("type", "Type Definitions");
block("foreigntype", "Foreign Types");
block("keyword", "Keywords");
+ block("traitalias", "Trait Aliases");
}
window.initSidebarItems = initSidebarItems;
// for vertical layout (column-oriented flex layout for divs caused
// errors in mobile browsers).
if (e.tagName === "H2" || e.tagName === "H3") {
- let nextTagName = e.nextElementSibling.tagName;
+ var nextTagName = e.nextElementSibling.tagName;
if (nextTagName == "H2" || nextTagName == "H3") {
e.nextElementSibling.style.display = "flex";
} else {
}
.content .highlighted a, .content .highlighted span { color: #eee !important; }
.content .highlighted.trait { background-color: #013191; }
+.content .highlighted.traitalias { background-color: #013191; }
.content .highlighted.mod,
.content .highlighted.externcrate { background-color: #afc6e4; }
.content .highlighted.mod { background-color: #803a1b; }
.content span.externcrate,
.content span.mod, .content a.mod, .block a.current.mod { color: #bda000; }
.content span.trait, .content a.trait, .block a.current.trait { color: #b78cf2; }
+.content span.traitalias, .content a.traitalias, .block a.current.traitalias { color: #b397da; }
.content span.fn, .content a.fn, .block a.current.fn,
.content span.method, .content a.method, .block a.current.method,
.content span.tymethod, .content a.tymethod, .block a.current.tymethod,
}
.content .highlighted a, .content .highlighted span { color: #000 !important; }
.content .highlighted.trait { background-color: #c7b6ff; }
+.content .highlighted.traitalias { background-color: #c7b6ff; }
.content .highlighted.mod,
.content .highlighted.externcrate { background-color: #afc6e4; }
.content .highlighted.enum { background-color: #b4d1b9; }
.content span.externcrate,
.content span.mod, .content a.mod, .block a.current.mod { color: #4d76ae; }
.content span.trait, .content a.trait, .block a.current.trait { color: #7c5af3; }
+.content span.traitalias, .content a.traitalias, .block a.current.traitalias { color: #6841f1; }
.content span.fn, .content a.fn, .block a.current.fn,
.content span.method, .content a.method, .block a.current.method,
.content span.tymethod, .content a.tymethod, .block a.current.tymethod,
| clean::ConstantItem(..)
| clean::UnionItem(..)
| clean::AssociatedConstItem(..)
+ | clean::TraitAliasItem(..)
| clean::ForeignTypeItem => {
if i.def_id.is_local() {
if !self.access_levels.is_exported(i.def_id) {
};
om.traits.push(t);
},
- hir::ItemKind::TraitAlias(..) => {
- unimplemented!("trait objects are not yet implemented")
+ hir::ItemKind::TraitAlias(ref gen, ref b) => {
+ let t = TraitAlias {
+ name: ident.name,
+ generics: gen.clone(),
+ bounds: b.iter().cloned().collect(),
+ id: item.id,
+ attrs: item.attrs.clone(),
+ whence: item.span,
+ vis: item.vis.clone(),
+ stab: self.stability(item.id),
+ depr: self.deprecation(item.id),
+ };
+ om.trait_aliases.push(t);
},
hir::ItemKind::Impl(unsafety,
authors = ["The Rust Project Developers"]
name = "serialize"
version = "0.0.0"
+edition = "2018"
[lib]
name = "serialize"
use std::hash::{Hash, BuildHasher};
-use {Decodable, Encodable, Decoder, Encoder};
+use crate::{Decodable, Encodable, Decoder, Encoder};
use std::collections::{LinkedList, VecDeque, BTreeMap, BTreeSet, HashMap, HashSet};
use std::rc::Rc;
use std::sync::Arc;
}
impl fmt::Display for FromHexError {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match *self {
InvalidHexCharacter(ch, idx) =>
write!(f, "Invalid character '{}' at position {}", ch, idx),
#[cfg(test)]
mod tests {
extern crate test;
- use self::test::Bencher;
- use hex::{FromHex, ToHex};
+ use test::Bencher;
+ use crate::hex::{FromHex, ToHex};
#[test]
pub fn test_to_hex() {
use std::str::FromStr;
use std::string;
use std::{char, f64, fmt, str};
-use std;
-use Encodable;
+use crate::Encodable;
/// Represents a json value
#[derive(Clone, PartialEq, PartialOrd, Debug)]
pub struct PrettyJson<'a> { inner: &'a Json }
-pub struct AsJson<'a, T: 'a> { inner: &'a T }
-pub struct AsPrettyJson<'a, T: 'a> { inner: &'a T, indent: Option<usize> }
+pub struct AsJson<'a, T> { inner: &'a T }
+pub struct AsPrettyJson<'a, T> { inner: &'a T, indent: Option<usize> }
/// The errors that can arise while parsing a JSON stream.
#[derive(Clone, Copy, PartialEq, Debug)]
}
/// Shortcut function to decode a JSON `&str` into an object
-pub fn decode<T: ::Decodable>(s: &str) -> DecodeResult<T> {
+pub fn decode<T: crate::Decodable>(s: &str) -> DecodeResult<T> {
let json = match from_str(s) {
Ok(x) => x,
Err(e) => return Err(ParseError(e))
};
let mut decoder = Decoder::new(json);
- ::Decodable::decode(&mut decoder)
+ crate::Decodable::decode(&mut decoder)
}
/// Shortcut function to encode a `T` into a JSON `String`
-pub fn encode<T: ::Encodable>(object: &T) -> Result<string::String, EncoderError> {
+pub fn encode<T: crate::Encodable>(object: &T) -> Result<string::String, EncoderError> {
let mut s = String::new();
{
let mut encoder = Encoder::new(&mut s);
}
impl fmt::Display for ErrorCode {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
error_str(*self).fmt(f)
}
}
}
impl fmt::Display for ParserError {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
// FIXME this should be a nicer error
fmt::Debug::fmt(self, f)
}
}
impl fmt::Display for DecoderError {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
// FIXME this should be a nicer error
fmt::Debug::fmt(self, f)
}
}
impl fmt::Display for EncoderError {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
// FIXME this should be a nicer error
fmt::Debug::fmt(self, f)
}
})
}
-impl<'a> ::Encoder for Encoder<'a> {
+impl<'a> crate::Encoder for Encoder<'a> {
type Error = EncoderError;
fn emit_unit(&mut self) -> EncodeResult {
}
}
-impl<'a> ::Encoder for PrettyEncoder<'a> {
+impl<'a> crate::Encoder for PrettyEncoder<'a> {
type Error = EncoderError;
fn emit_unit(&mut self) -> EncodeResult {
}
impl Encodable for Json {
- fn encode<E: ::Encoder>(&self, e: &mut E) -> Result<(), E::Error> {
+ fn encode<E: crate::Encoder>(&self, e: &mut E) -> Result<(), E::Error> {
match *self {
Json::I64(v) => v.encode(e),
Json::U64(v) => v.encode(e),
/// Creates an `AsJson` wrapper which can be used to print a value as JSON
/// on-the-fly via `write!`
-pub fn as_json<T>(t: &T) -> AsJson<T> {
+pub fn as_json<T>(t: &T) -> AsJson<'_, T> {
AsJson { inner: t }
}
/// Creates an `AsPrettyJson` wrapper which can be used to print a value as JSON
/// on-the-fly via `write!`
-pub fn as_pretty_json<T>(t: &T) -> AsPrettyJson<T> {
+pub fn as_pretty_json<T>(t: &T) -> AsPrettyJson<'_, T> {
AsPrettyJson { inner: t, indent: None }
}
impl Json {
/// Borrow this json object as a pretty object to generate a pretty
/// representation for it via `Display`.
- pub fn pretty(&self) -> PrettyJson {
+ pub fn pretty(&self) -> PrettyJson<'_> {
PrettyJson { inner: self }
}
/// Provides access to the StackElement at a given index.
/// lower indices are at the bottom of the stack while higher indices are
/// at the top.
- pub fn get(&self, idx: usize) -> StackElement {
+ pub fn get(&self, idx: usize) -> StackElement<'_> {
match self.stack[idx] {
InternalIndex(i) => StackElement::Index(i),
InternalKey(start, size) => {
}
}
- /// Compares this stack with an array of StackElements.
- pub fn is_equal_to(&self, rhs: &[StackElement]) -> bool {
+    /// Compares this stack with an array of `StackElement`s.
+ pub fn is_equal_to(&self, rhs: &[StackElement<'_>]) -> bool {
if self.stack.len() != rhs.len() { return false; }
for (i, r) in rhs.iter().enumerate() {
if self.get(i) != *r { return false; }
/// Returns `true` if the bottom-most elements of this stack are the same as
/// the ones passed as parameter.
- pub fn starts_with(&self, rhs: &[StackElement]) -> bool {
+ pub fn starts_with(&self, rhs: &[StackElement<'_>]) -> bool {
if self.stack.len() < rhs.len() { return false; }
for (i, r) in rhs.iter().enumerate() {
if self.get(i) != *r { return false; }
/// Returns `true` if the top-most elements of this stack are the same as
/// the ones passed as parameter.
- pub fn ends_with(&self, rhs: &[StackElement]) -> bool {
+ pub fn ends_with(&self, rhs: &[StackElement<'_>]) -> bool {
if self.stack.len() < rhs.len() { return false; }
let offset = self.stack.len() - rhs.len();
for (i, r) in rhs.iter().enumerate() {
}
/// Returns the top-most element (if any).
- pub fn top(&self) -> Option<StackElement> {
+ pub fn top(&self) -> Option<StackElement<'_>> {
match self.stack.last() {
None => None,
Some(&InternalIndex(i)) => Some(StackElement::Index(i)),
}
}
-impl ::Decoder for Decoder {
+impl crate::Decoder for Decoder {
type Error = DecoderError;
fn read_nil(&mut self) -> DecodeResult<()> {
Err(ExpectedError("single character string".to_owned(), s.to_string()))
}
- fn read_str(&mut self) -> DecodeResult<Cow<str>> {
+ fn read_str(&mut self) -> DecodeResult<Cow<'_, str>> {
expect!(self.pop(), String).map(Cow::Owned)
}
impl fmt::Display for Json {
/// Encodes a json value into a string
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let mut shim = FormatShim { inner: f };
let mut encoder = Encoder::new(&mut shim);
match self.encode(&mut encoder) {
impl<'a> fmt::Display for PrettyJson<'a> {
/// Encodes a json value into a string
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let mut shim = FormatShim { inner: f };
let mut encoder = PrettyEncoder::new(&mut shim);
match self.inner.encode(&mut encoder) {
impl<'a, T: Encodable> fmt::Display for AsJson<'a, T> {
/// Encodes a json value into a string
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let mut shim = FormatShim { inner: f };
let mut encoder = Encoder::new(&mut shim);
match self.inner.encode(&mut encoder) {
impl<'a, T: Encodable> fmt::Display for AsPrettyJson<'a, T> {
/// Encodes a json value into a string
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let mut shim = FormatShim { inner: f };
let mut encoder = PrettyEncoder::new(&mut shim);
if let Some(n) = self.indent {
#[cfg(test)]
mod tests {
+ // Benchmarks and tests that require private items
+
extern crate test;
- use self::Animal::*;
- use self::test::Bencher;
- use {Encodable, Decodable};
- use super::Json::*;
- use super::ErrorCode::*;
- use super::ParserError::*;
- use super::DecoderError::*;
- use super::JsonEvent::*;
- use super::{Json, from_str, DecodeResult, DecoderError, JsonEvent, Parser,
- StackElement, Stack, Decoder, Encoder, EncoderError};
- use std::{i64, u64, f32, f64};
- use std::io::prelude::*;
- use std::collections::BTreeMap;
+ use test::Bencher;
+ use super::{from_str, Parser, StackElement, Stack};
use std::string;
- #[derive(RustcDecodable, Eq, PartialEq, Debug)]
- struct OptionData {
- opt: Option<usize>,
- }
-
- #[test]
- fn test_decode_option_none() {
- let s ="{}";
- let obj: OptionData = super::decode(s).unwrap();
- assert_eq!(obj, OptionData { opt: None });
- }
-
- #[test]
- fn test_decode_option_some() {
- let s = "{ \"opt\": 10 }";
- let obj: OptionData = super::decode(s).unwrap();
- assert_eq!(obj, OptionData { opt: Some(10) });
- }
-
- #[test]
- fn test_decode_option_malformed() {
- check_err::<OptionData>("{ \"opt\": [] }",
- ExpectedError("Number".to_string(), "[]".to_string()));
- check_err::<OptionData>("{ \"opt\": false }",
- ExpectedError("Number".to_string(), "false".to_string()));
- }
-
- #[derive(PartialEq, RustcEncodable, RustcDecodable, Debug)]
- enum Animal {
- Dog,
- Frog(string::String, isize)
- }
-
- #[derive(PartialEq, RustcEncodable, RustcDecodable, Debug)]
- struct Inner {
- a: (),
- b: usize,
- c: Vec<string::String>,
- }
-
- #[derive(PartialEq, RustcEncodable, RustcDecodable, Debug)]
- struct Outer {
- inner: Vec<Inner>,
- }
-
- fn mk_object(items: &[(string::String, Json)]) -> Json {
- let mut d = BTreeMap::new();
-
- for item in items {
- match *item {
- (ref key, ref value) => { d.insert((*key).clone(), (*value).clone()); },
- }
- };
-
- Object(d)
- }
-
- #[test]
- fn test_from_str_trait() {
- let s = "null";
- assert!(s.parse::<Json>().unwrap() == s.parse().unwrap());
- }
-
- #[test]
- fn test_write_null() {
- assert_eq!(Null.to_string(), "null");
- assert_eq!(Null.pretty().to_string(), "null");
- }
-
- #[test]
- fn test_write_i64() {
- assert_eq!(U64(0).to_string(), "0");
- assert_eq!(U64(0).pretty().to_string(), "0");
-
- assert_eq!(U64(1234).to_string(), "1234");
- assert_eq!(U64(1234).pretty().to_string(), "1234");
-
- assert_eq!(I64(-5678).to_string(), "-5678");
- assert_eq!(I64(-5678).pretty().to_string(), "-5678");
-
- assert_eq!(U64(7650007200025252000).to_string(), "7650007200025252000");
- assert_eq!(U64(7650007200025252000).pretty().to_string(), "7650007200025252000");
- }
-
- #[test]
- fn test_write_f64() {
- assert_eq!(F64(3.0).to_string(), "3.0");
- assert_eq!(F64(3.0).pretty().to_string(), "3.0");
-
- assert_eq!(F64(3.1).to_string(), "3.1");
- assert_eq!(F64(3.1).pretty().to_string(), "3.1");
-
- assert_eq!(F64(-1.5).to_string(), "-1.5");
- assert_eq!(F64(-1.5).pretty().to_string(), "-1.5");
-
- assert_eq!(F64(0.5).to_string(), "0.5");
- assert_eq!(F64(0.5).pretty().to_string(), "0.5");
-
- assert_eq!(F64(f64::NAN).to_string(), "null");
- assert_eq!(F64(f64::NAN).pretty().to_string(), "null");
-
- assert_eq!(F64(f64::INFINITY).to_string(), "null");
- assert_eq!(F64(f64::INFINITY).pretty().to_string(), "null");
-
- assert_eq!(F64(f64::NEG_INFINITY).to_string(), "null");
- assert_eq!(F64(f64::NEG_INFINITY).pretty().to_string(), "null");
- }
-
- #[test]
- fn test_write_str() {
- assert_eq!(String("".to_string()).to_string(), "\"\"");
- assert_eq!(String("".to_string()).pretty().to_string(), "\"\"");
-
- assert_eq!(String("homura".to_string()).to_string(), "\"homura\"");
- assert_eq!(String("madoka".to_string()).pretty().to_string(), "\"madoka\"");
- }
-
- #[test]
- fn test_write_bool() {
- assert_eq!(Boolean(true).to_string(), "true");
- assert_eq!(Boolean(true).pretty().to_string(), "true");
-
- assert_eq!(Boolean(false).to_string(), "false");
- assert_eq!(Boolean(false).pretty().to_string(), "false");
- }
-
- #[test]
- fn test_write_array() {
- assert_eq!(Array(vec![]).to_string(), "[]");
- assert_eq!(Array(vec![]).pretty().to_string(), "[]");
-
- assert_eq!(Array(vec![Boolean(true)]).to_string(), "[true]");
- assert_eq!(
- Array(vec![Boolean(true)]).pretty().to_string(),
- "\
- [\n \
- true\n\
- ]"
- );
-
- let long_test_array = Array(vec![
- Boolean(false),
- Null,
- Array(vec![String("foo\nbar".to_string()), F64(3.5)])]);
-
- assert_eq!(long_test_array.to_string(),
- "[false,null,[\"foo\\nbar\",3.5]]");
- assert_eq!(
- long_test_array.pretty().to_string(),
- "\
- [\n \
- false,\n \
- null,\n \
- [\n \
- \"foo\\nbar\",\n \
- 3.5\n \
- ]\n\
- ]"
- );
- }
-
- #[test]
- fn test_write_object() {
- assert_eq!(mk_object(&[]).to_string(), "{}");
- assert_eq!(mk_object(&[]).pretty().to_string(), "{}");
-
- assert_eq!(
- mk_object(&[
- ("a".to_string(), Boolean(true))
- ]).to_string(),
- "{\"a\":true}"
- );
- assert_eq!(
- mk_object(&[("a".to_string(), Boolean(true))]).pretty().to_string(),
- "\
- {\n \
- \"a\": true\n\
- }"
- );
-
- let complex_obj = mk_object(&[
- ("b".to_string(), Array(vec![
- mk_object(&[("c".to_string(), String("\x0c\r".to_string()))]),
- mk_object(&[("d".to_string(), String("".to_string()))])
- ]))
- ]);
-
- assert_eq!(
- complex_obj.to_string(),
- "{\
- \"b\":[\
- {\"c\":\"\\f\\r\"},\
- {\"d\":\"\"}\
- ]\
- }"
- );
- assert_eq!(
- complex_obj.pretty().to_string(),
- "\
- {\n \
- \"b\": [\n \
- {\n \
- \"c\": \"\\f\\r\"\n \
- },\n \
- {\n \
- \"d\": \"\"\n \
- }\n \
- ]\n\
- }"
- );
-
- let a = mk_object(&[
- ("a".to_string(), Boolean(true)),
- ("b".to_string(), Array(vec![
- mk_object(&[("c".to_string(), String("\x0c\r".to_string()))]),
- mk_object(&[("d".to_string(), String("".to_string()))])
- ]))
- ]);
-
- // We can't compare the strings directly because the object fields be
- // printed in a different order.
- assert_eq!(a.clone(), a.to_string().parse().unwrap());
- assert_eq!(a.clone(), a.pretty().to_string().parse().unwrap());
- }
-
- #[test]
- fn test_write_enum() {
- let animal = Dog;
- assert_eq!(
- super::as_json(&animal).to_string(),
- "\"Dog\""
- );
- assert_eq!(
- super::as_pretty_json(&animal).to_string(),
- "\"Dog\""
- );
-
- let animal = Frog("Henry".to_string(), 349);
- assert_eq!(
- super::as_json(&animal).to_string(),
- "{\"variant\":\"Frog\",\"fields\":[\"Henry\",349]}"
- );
- assert_eq!(
- super::as_pretty_json(&animal).to_string(),
- "{\n \
- \"variant\": \"Frog\",\n \
- \"fields\": [\n \
- \"Henry\",\n \
- 349\n \
- ]\n\
- }"
- );
- }
-
- macro_rules! check_encoder_for_simple {
- ($value:expr, $expected:expr) => ({
- let s = super::as_json(&$value).to_string();
- assert_eq!(s, $expected);
-
- let s = super::as_pretty_json(&$value).to_string();
- assert_eq!(s, $expected);
- })
- }
-
- #[test]
- fn test_write_some() {
- check_encoder_for_simple!(Some("jodhpurs".to_string()), "\"jodhpurs\"");
- }
-
- #[test]
- fn test_write_none() {
- check_encoder_for_simple!(None::<string::String>, "null");
- }
-
- #[test]
- fn test_write_char() {
- check_encoder_for_simple!('a', "\"a\"");
- check_encoder_for_simple!('\t', "\"\\t\"");
- check_encoder_for_simple!('\u{0000}', "\"\\u0000\"");
- check_encoder_for_simple!('\u{001b}', "\"\\u001b\"");
- check_encoder_for_simple!('\u{007f}', "\"\\u007f\"");
- check_encoder_for_simple!('\u{00a0}', "\"\u{00a0}\"");
- check_encoder_for_simple!('\u{abcd}', "\"\u{abcd}\"");
- check_encoder_for_simple!('\u{10ffff}', "\"\u{10ffff}\"");
- }
-
- #[test]
- fn test_trailing_characters() {
- assert_eq!(from_str("nulla"), Err(SyntaxError(TrailingCharacters, 1, 5)));
- assert_eq!(from_str("truea"), Err(SyntaxError(TrailingCharacters, 1, 5)));
- assert_eq!(from_str("falsea"), Err(SyntaxError(TrailingCharacters, 1, 6)));
- assert_eq!(from_str("1a"), Err(SyntaxError(TrailingCharacters, 1, 2)));
- assert_eq!(from_str("[]a"), Err(SyntaxError(TrailingCharacters, 1, 3)));
- assert_eq!(from_str("{}a"), Err(SyntaxError(TrailingCharacters, 1, 3)));
- }
-
- #[test]
- fn test_read_identifiers() {
- assert_eq!(from_str("n"), Err(SyntaxError(InvalidSyntax, 1, 2)));
- assert_eq!(from_str("nul"), Err(SyntaxError(InvalidSyntax, 1, 4)));
- assert_eq!(from_str("t"), Err(SyntaxError(InvalidSyntax, 1, 2)));
- assert_eq!(from_str("truz"), Err(SyntaxError(InvalidSyntax, 1, 4)));
- assert_eq!(from_str("f"), Err(SyntaxError(InvalidSyntax, 1, 2)));
- assert_eq!(from_str("faz"), Err(SyntaxError(InvalidSyntax, 1, 3)));
-
- assert_eq!(from_str("null"), Ok(Null));
- assert_eq!(from_str("true"), Ok(Boolean(true)));
- assert_eq!(from_str("false"), Ok(Boolean(false)));
- assert_eq!(from_str(" null "), Ok(Null));
- assert_eq!(from_str(" true "), Ok(Boolean(true)));
- assert_eq!(from_str(" false "), Ok(Boolean(false)));
- }
-
- #[test]
- fn test_decode_identifiers() {
- let v: () = super::decode("null").unwrap();
- assert_eq!(v, ());
-
- let v: bool = super::decode("true").unwrap();
- assert_eq!(v, true);
-
- let v: bool = super::decode("false").unwrap();
- assert_eq!(v, false);
- }
-
- #[test]
- fn test_read_number() {
- assert_eq!(from_str("+"), Err(SyntaxError(InvalidSyntax, 1, 1)));
- assert_eq!(from_str("."), Err(SyntaxError(InvalidSyntax, 1, 1)));
- assert_eq!(from_str("NaN"), Err(SyntaxError(InvalidSyntax, 1, 1)));
- assert_eq!(from_str("-"), Err(SyntaxError(InvalidNumber, 1, 2)));
- assert_eq!(from_str("00"), Err(SyntaxError(InvalidNumber, 1, 2)));
- assert_eq!(from_str("1."), Err(SyntaxError(InvalidNumber, 1, 3)));
- assert_eq!(from_str("1e"), Err(SyntaxError(InvalidNumber, 1, 3)));
- assert_eq!(from_str("1e+"), Err(SyntaxError(InvalidNumber, 1, 4)));
-
- assert_eq!(from_str("18446744073709551616"), Err(SyntaxError(InvalidNumber, 1, 20)));
- assert_eq!(from_str("-9223372036854775809"), Err(SyntaxError(InvalidNumber, 1, 21)));
-
- assert_eq!(from_str("3"), Ok(U64(3)));
- assert_eq!(from_str("3.1"), Ok(F64(3.1)));
- assert_eq!(from_str("-1.2"), Ok(F64(-1.2)));
- assert_eq!(from_str("0.4"), Ok(F64(0.4)));
- assert_eq!(from_str("0.4e5"), Ok(F64(0.4e5)));
- assert_eq!(from_str("0.4e+15"), Ok(F64(0.4e15)));
- assert_eq!(from_str("0.4e-01"), Ok(F64(0.4e-01)));
- assert_eq!(from_str(" 3 "), Ok(U64(3)));
-
- assert_eq!(from_str("-9223372036854775808"), Ok(I64(i64::MIN)));
- assert_eq!(from_str("9223372036854775807"), Ok(U64(i64::MAX as u64)));
- assert_eq!(from_str("18446744073709551615"), Ok(U64(u64::MAX)));
- }
-
- #[test]
- fn test_decode_numbers() {
- let v: f64 = super::decode("3").unwrap();
- assert_eq!(v, 3.0);
-
- let v: f64 = super::decode("3.1").unwrap();
- assert_eq!(v, 3.1);
-
- let v: f64 = super::decode("-1.2").unwrap();
- assert_eq!(v, -1.2);
-
- let v: f64 = super::decode("0.4").unwrap();
- assert_eq!(v, 0.4);
-
- let v: f64 = super::decode("0.4e5").unwrap();
- assert_eq!(v, 0.4e5);
-
- let v: f64 = super::decode("0.4e15").unwrap();
- assert_eq!(v, 0.4e15);
-
- let v: f64 = super::decode("0.4e-01").unwrap();
- assert_eq!(v, 0.4e-01);
-
- let v: u64 = super::decode("0").unwrap();
- assert_eq!(v, 0);
-
- let v: u64 = super::decode("18446744073709551615").unwrap();
- assert_eq!(v, u64::MAX);
-
- let v: i64 = super::decode("-9223372036854775808").unwrap();
- assert_eq!(v, i64::MIN);
-
- let v: i64 = super::decode("9223372036854775807").unwrap();
- assert_eq!(v, i64::MAX);
-
- let res: DecodeResult<i64> = super::decode("765.25");
- assert_eq!(res, Err(ExpectedError("Integer".to_string(),
- "765.25".to_string())));
- }
-
- #[test]
- fn test_read_str() {
- assert_eq!(from_str("\""), Err(SyntaxError(EOFWhileParsingString, 1, 2)));
- assert_eq!(from_str("\"lol"), Err(SyntaxError(EOFWhileParsingString, 1, 5)));
-
- assert_eq!(from_str("\"\""), Ok(String("".to_string())));
- assert_eq!(from_str("\"foo\""), Ok(String("foo".to_string())));
- assert_eq!(from_str("\"\\\"\""), Ok(String("\"".to_string())));
- assert_eq!(from_str("\"\\b\""), Ok(String("\x08".to_string())));
- assert_eq!(from_str("\"\\n\""), Ok(String("\n".to_string())));
- assert_eq!(from_str("\"\\r\""), Ok(String("\r".to_string())));
- assert_eq!(from_str("\"\\t\""), Ok(String("\t".to_string())));
- assert_eq!(from_str(" \"foo\" "), Ok(String("foo".to_string())));
- assert_eq!(from_str("\"\\u12ab\""), Ok(String("\u{12ab}".to_string())));
- assert_eq!(from_str("\"\\uAB12\""), Ok(String("\u{AB12}".to_string())));
- }
-
- #[test]
- fn test_decode_str() {
- let s = [("\"\"", ""),
- ("\"foo\"", "foo"),
- ("\"\\\"\"", "\""),
- ("\"\\b\"", "\x08"),
- ("\"\\n\"", "\n"),
- ("\"\\r\"", "\r"),
- ("\"\\t\"", "\t"),
- ("\"\\u12ab\"", "\u{12ab}"),
- ("\"\\uAB12\"", "\u{AB12}")];
-
- for &(i, o) in &s {
- let v: string::String = super::decode(i).unwrap();
- assert_eq!(v, o);
- }
- }
-
- #[test]
- fn test_read_array() {
- assert_eq!(from_str("["), Err(SyntaxError(EOFWhileParsingValue, 1, 2)));
- assert_eq!(from_str("[1"), Err(SyntaxError(EOFWhileParsingArray, 1, 3)));
- assert_eq!(from_str("[1,"), Err(SyntaxError(EOFWhileParsingValue, 1, 4)));
- assert_eq!(from_str("[1,]"), Err(SyntaxError(InvalidSyntax, 1, 4)));
- assert_eq!(from_str("[6 7]"), Err(SyntaxError(InvalidSyntax, 1, 4)));
-
- assert_eq!(from_str("[]"), Ok(Array(vec![])));
- assert_eq!(from_str("[ ]"), Ok(Array(vec![])));
- assert_eq!(from_str("[true]"), Ok(Array(vec![Boolean(true)])));
- assert_eq!(from_str("[ false ]"), Ok(Array(vec![Boolean(false)])));
- assert_eq!(from_str("[null]"), Ok(Array(vec![Null])));
- assert_eq!(from_str("[3, 1]"),
- Ok(Array(vec![U64(3), U64(1)])));
- assert_eq!(from_str("\n[3, 2]\n"),
- Ok(Array(vec![U64(3), U64(2)])));
- assert_eq!(from_str("[2, [4, 1]]"),
- Ok(Array(vec![U64(2), Array(vec![U64(4), U64(1)])])));
- }
-
- #[test]
- fn test_decode_array() {
- let v: Vec<()> = super::decode("[]").unwrap();
- assert_eq!(v, []);
-
- let v: Vec<()> = super::decode("[null]").unwrap();
- assert_eq!(v, [()]);
-
- let v: Vec<bool> = super::decode("[true]").unwrap();
- assert_eq!(v, [true]);
-
- let v: Vec<isize> = super::decode("[3, 1]").unwrap();
- assert_eq!(v, [3, 1]);
-
- let v: Vec<Vec<usize>> = super::decode("[[3], [1, 2]]").unwrap();
- assert_eq!(v, [vec![3], vec![1, 2]]);
- }
-
- #[test]
- fn test_decode_tuple() {
- let t: (usize, usize, usize) = super::decode("[1, 2, 3]").unwrap();
- assert_eq!(t, (1, 2, 3));
-
- let t: (usize, string::String) = super::decode("[1, \"two\"]").unwrap();
- assert_eq!(t, (1, "two".to_string()));
- }
-
- #[test]
- fn test_decode_tuple_malformed_types() {
- assert!(super::decode::<(usize, string::String)>("[1, 2]").is_err());
- }
-
- #[test]
- fn test_decode_tuple_malformed_length() {
- assert!(super::decode::<(usize, usize)>("[1, 2, 3]").is_err());
- }
-
- #[test]
- fn test_read_object() {
- assert_eq!(from_str("{"), Err(SyntaxError(EOFWhileParsingObject, 1, 2)));
- assert_eq!(from_str("{ "), Err(SyntaxError(EOFWhileParsingObject, 1, 3)));
- assert_eq!(from_str("{1"), Err(SyntaxError(KeyMustBeAString, 1, 2)));
- assert_eq!(from_str("{ \"a\""), Err(SyntaxError(EOFWhileParsingObject, 1, 6)));
- assert_eq!(from_str("{\"a\""), Err(SyntaxError(EOFWhileParsingObject, 1, 5)));
- assert_eq!(from_str("{\"a\" "), Err(SyntaxError(EOFWhileParsingObject, 1, 6)));
-
- assert_eq!(from_str("{\"a\" 1"), Err(SyntaxError(ExpectedColon, 1, 6)));
- assert_eq!(from_str("{\"a\":"), Err(SyntaxError(EOFWhileParsingValue, 1, 6)));
- assert_eq!(from_str("{\"a\":1"), Err(SyntaxError(EOFWhileParsingObject, 1, 7)));
- assert_eq!(from_str("{\"a\":1 1"), Err(SyntaxError(InvalidSyntax, 1, 8)));
- assert_eq!(from_str("{\"a\":1,"), Err(SyntaxError(EOFWhileParsingObject, 1, 8)));
-
- assert_eq!(from_str("{}").unwrap(), mk_object(&[]));
- assert_eq!(from_str("{\"a\": 3}").unwrap(),
- mk_object(&[("a".to_string(), U64(3))]));
-
- assert_eq!(from_str(
- "{ \"a\": null, \"b\" : true }").unwrap(),
- mk_object(&[
- ("a".to_string(), Null),
- ("b".to_string(), Boolean(true))]));
- assert_eq!(from_str("\n{ \"a\": null, \"b\" : true }\n").unwrap(),
- mk_object(&[
- ("a".to_string(), Null),
- ("b".to_string(), Boolean(true))]));
- assert_eq!(from_str(
- "{\"a\" : 1.0 ,\"b\": [ true ]}").unwrap(),
- mk_object(&[
- ("a".to_string(), F64(1.0)),
- ("b".to_string(), Array(vec![Boolean(true)]))
- ]));
- assert_eq!(from_str(
- "{\
- \"a\": 1.0, \
- \"b\": [\
- true,\
- \"foo\\nbar\", \
- { \"c\": {\"d\": null} } \
- ]\
- }").unwrap(),
- mk_object(&[
- ("a".to_string(), F64(1.0)),
- ("b".to_string(), Array(vec![
- Boolean(true),
- String("foo\nbar".to_string()),
- mk_object(&[
- ("c".to_string(), mk_object(&[("d".to_string(), Null)]))
- ])
- ]))
- ]));
- }
-
- #[test]
- fn test_decode_struct() {
- let s = "{
- \"inner\": [
- { \"a\": null, \"b\": 2, \"c\": [\"abc\", \"xyz\"] }
- ]
- }";
-
- let v: Outer = super::decode(s).unwrap();
- assert_eq!(
- v,
- Outer {
- inner: vec![
- Inner { a: (), b: 2, c: vec!["abc".to_string(), "xyz".to_string()] }
- ]
- }
- );
- }
-
- #[derive(RustcDecodable)]
- struct FloatStruct {
- f: f64,
- a: Vec<f64>
- }
- #[test]
- fn test_decode_struct_with_nan() {
- let s = "{\"f\":null,\"a\":[null,123]}";
- let obj: FloatStruct = super::decode(s).unwrap();
- assert!(obj.f.is_nan());
- assert!(obj.a[0].is_nan());
- assert_eq!(obj.a[1], 123f64);
- }
-
- #[test]
- fn test_decode_option() {
- let value: Option<string::String> = super::decode("null").unwrap();
- assert_eq!(value, None);
-
- let value: Option<string::String> = super::decode("\"jodhpurs\"").unwrap();
- assert_eq!(value, Some("jodhpurs".to_string()));
- }
-
- #[test]
- fn test_decode_enum() {
- let value: Animal = super::decode("\"Dog\"").unwrap();
- assert_eq!(value, Dog);
-
- let s = "{\"variant\":\"Frog\",\"fields\":[\"Henry\",349]}";
- let value: Animal = super::decode(s).unwrap();
- assert_eq!(value, Frog("Henry".to_string(), 349));
- }
-
- #[test]
- fn test_decode_map() {
- let s = "{\"a\": \"Dog\", \"b\": {\"variant\":\"Frog\",\
- \"fields\":[\"Henry\", 349]}}";
- let mut map: BTreeMap<string::String, Animal> = super::decode(s).unwrap();
-
- assert_eq!(map.remove(&"a".to_string()), Some(Dog));
- assert_eq!(map.remove(&"b".to_string()), Some(Frog("Henry".to_string(), 349)));
- }
-
- #[test]
- fn test_multiline_errors() {
- assert_eq!(from_str("{\n \"foo\":\n \"bar\""),
- Err(SyntaxError(EOFWhileParsingObject, 3, 8)));
- }
-
- #[derive(RustcDecodable)]
- #[allow(dead_code)]
- struct DecodeStruct {
- x: f64,
- y: bool,
- z: string::String,
- w: Vec<DecodeStruct>
- }
- #[derive(RustcDecodable)]
- enum DecodeEnum {
- A(f64),
- B(string::String)
- }
- fn check_err<T: Decodable>(to_parse: &'static str, expected: DecoderError) {
- let res: DecodeResult<T> = match from_str(to_parse) {
- Err(e) => Err(ParseError(e)),
- Ok(json) => Decodable::decode(&mut Decoder::new(json))
- };
- match res {
- Ok(_) => panic!("`{:?}` parsed & decoded ok, expecting error `{:?}`",
- to_parse, expected),
- Err(ParseError(e)) => panic!("`{:?}` is not valid json: {:?}",
- to_parse, e),
- Err(e) => {
- assert_eq!(e, expected);
- }
- }
- }
- #[test]
- fn test_decode_errors_struct() {
- check_err::<DecodeStruct>("[]", ExpectedError("Object".to_string(), "[]".to_string()));
- check_err::<DecodeStruct>("{\"x\": true, \"y\": true, \"z\": \"\", \"w\": []}",
- ExpectedError("Number".to_string(), "true".to_string()));
- check_err::<DecodeStruct>("{\"x\": 1, \"y\": [], \"z\": \"\", \"w\": []}",
- ExpectedError("Boolean".to_string(), "[]".to_string()));
- check_err::<DecodeStruct>("{\"x\": 1, \"y\": true, \"z\": {}, \"w\": []}",
- ExpectedError("String".to_string(), "{}".to_string()));
- check_err::<DecodeStruct>("{\"x\": 1, \"y\": true, \"z\": \"\", \"w\": null}",
- ExpectedError("Array".to_string(), "null".to_string()));
- check_err::<DecodeStruct>("{\"x\": 1, \"y\": true, \"z\": \"\"}",
- MissingFieldError("w".to_string()));
- }
- #[test]
- fn test_decode_errors_enum() {
- check_err::<DecodeEnum>("{}",
- MissingFieldError("variant".to_string()));
- check_err::<DecodeEnum>("{\"variant\": 1}",
- ExpectedError("String".to_string(), "1".to_string()));
- check_err::<DecodeEnum>("{\"variant\": \"A\"}",
- MissingFieldError("fields".to_string()));
- check_err::<DecodeEnum>("{\"variant\": \"A\", \"fields\": null}",
- ExpectedError("Array".to_string(), "null".to_string()));
- check_err::<DecodeEnum>("{\"variant\": \"C\", \"fields\": []}",
- UnknownVariantError("C".to_string()));
- }
-
- #[test]
- fn test_find(){
- let json_value = from_str("{\"dog\" : \"cat\"}").unwrap();
- let found_str = json_value.find("dog");
- assert!(found_str.unwrap().as_string().unwrap() == "cat");
- }
-
- #[test]
- fn test_find_path(){
- let json_value = from_str("{\"dog\":{\"cat\": {\"mouse\" : \"cheese\"}}}").unwrap();
- let found_str = json_value.find_path(&["dog", "cat", "mouse"]);
- assert!(found_str.unwrap().as_string().unwrap() == "cheese");
- }
-
- #[test]
- fn test_search(){
- let json_value = from_str("{\"dog\":{\"cat\": {\"mouse\" : \"cheese\"}}}").unwrap();
- let found_str = json_value.search("mouse").and_then(|j| j.as_string());
- assert!(found_str.unwrap() == "cheese");
- }
-
- #[test]
- fn test_index(){
- let json_value = from_str("{\"animals\":[\"dog\",\"cat\",\"mouse\"]}").unwrap();
- let ref array = json_value["animals"];
- assert_eq!(array[0].as_string().unwrap(), "dog");
- assert_eq!(array[1].as_string().unwrap(), "cat");
- assert_eq!(array[2].as_string().unwrap(), "mouse");
- }
-
- #[test]
- fn test_is_object(){
- let json_value = from_str("{}").unwrap();
- assert!(json_value.is_object());
- }
-
- #[test]
- fn test_as_object(){
- let json_value = from_str("{}").unwrap();
- let json_object = json_value.as_object();
- assert!(json_object.is_some());
- }
-
- #[test]
- fn test_is_array(){
- let json_value = from_str("[1, 2, 3]").unwrap();
- assert!(json_value.is_array());
- }
-
- #[test]
- fn test_as_array(){
- let json_value = from_str("[1, 2, 3]").unwrap();
- let json_array = json_value.as_array();
- let expected_length = 3;
- assert!(json_array.is_some() && json_array.unwrap().len() == expected_length);
- }
-
- #[test]
- fn test_is_string(){
- let json_value = from_str("\"dog\"").unwrap();
- assert!(json_value.is_string());
- }
-
- #[test]
- fn test_as_string(){
- let json_value = from_str("\"dog\"").unwrap();
- let json_str = json_value.as_string();
- let expected_str = "dog";
- assert_eq!(json_str, Some(expected_str));
- }
-
- #[test]
- fn test_is_number(){
- let json_value = from_str("12").unwrap();
- assert!(json_value.is_number());
- }
-
- #[test]
- fn test_is_i64(){
- let json_value = from_str("-12").unwrap();
- assert!(json_value.is_i64());
-
- let json_value = from_str("12").unwrap();
- assert!(!json_value.is_i64());
-
- let json_value = from_str("12.0").unwrap();
- assert!(!json_value.is_i64());
- }
-
- #[test]
- fn test_is_u64(){
- let json_value = from_str("12").unwrap();
- assert!(json_value.is_u64());
-
- let json_value = from_str("-12").unwrap();
- assert!(!json_value.is_u64());
-
- let json_value = from_str("12.0").unwrap();
- assert!(!json_value.is_u64());
- }
-
- #[test]
- fn test_is_f64(){
- let json_value = from_str("12").unwrap();
- assert!(!json_value.is_f64());
-
- let json_value = from_str("-12").unwrap();
- assert!(!json_value.is_f64());
-
- let json_value = from_str("12.0").unwrap();
- assert!(json_value.is_f64());
-
- let json_value = from_str("-12.0").unwrap();
- assert!(json_value.is_f64());
- }
-
- #[test]
- fn test_as_i64(){
- let json_value = from_str("-12").unwrap();
- let json_num = json_value.as_i64();
- assert_eq!(json_num, Some(-12));
- }
-
- #[test]
- fn test_as_u64(){
- let json_value = from_str("12").unwrap();
- let json_num = json_value.as_u64();
- assert_eq!(json_num, Some(12));
- }
-
- #[test]
- fn test_as_f64(){
- let json_value = from_str("12.0").unwrap();
- let json_num = json_value.as_f64();
- assert_eq!(json_num, Some(12f64));
- }
-
- #[test]
- fn test_is_boolean(){
- let json_value = from_str("false").unwrap();
- assert!(json_value.is_boolean());
- }
-
- #[test]
- fn test_as_boolean(){
- let json_value = from_str("false").unwrap();
- let json_bool = json_value.as_boolean();
- let expected_bool = false;
- assert!(json_bool.is_some() && json_bool.unwrap() == expected_bool);
- }
-
- #[test]
- fn test_is_null(){
- let json_value = from_str("null").unwrap();
- assert!(json_value.is_null());
- }
-
- #[test]
- fn test_as_null(){
- let json_value = from_str("null").unwrap();
- let json_null = json_value.as_null();
- let expected_null = ();
- assert!(json_null.is_some() && json_null.unwrap() == expected_null);
- }
-
- #[test]
- fn test_encode_hashmap_with_numeric_key() {
- use std::str::from_utf8;
- use std::collections::HashMap;
- let mut hm: HashMap<usize, bool> = HashMap::new();
- hm.insert(1, true);
- let mut mem_buf = Vec::new();
- write!(&mut mem_buf, "{}", super::as_pretty_json(&hm)).unwrap();
- let json_str = from_utf8(&mem_buf[..]).unwrap();
- match from_str(json_str) {
- Err(_) => panic!("Unable to parse json_str: {:?}", json_str),
- _ => {} // it parsed and we are good to go
- }
- }
-
- #[test]
- fn test_prettyencode_hashmap_with_numeric_key() {
- use std::str::from_utf8;
- use std::collections::HashMap;
- let mut hm: HashMap<usize, bool> = HashMap::new();
- hm.insert(1, true);
- let mut mem_buf = Vec::new();
- write!(&mut mem_buf, "{}", super::as_pretty_json(&hm)).unwrap();
- let json_str = from_utf8(&mem_buf[..]).unwrap();
- match from_str(json_str) {
- Err(_) => panic!("Unable to parse json_str: {:?}", json_str),
- _ => {} // it parsed and we are good to go
- }
- }
-
- #[test]
- fn test_prettyencoder_indent_level_param() {
- use std::str::from_utf8;
- use std::collections::BTreeMap;
-
- let mut tree = BTreeMap::new();
-
- tree.insert("hello".to_string(), String("guten tag".to_string()));
- tree.insert("goodbye".to_string(), String("sayonara".to_string()));
-
- let json = Array(
- // The following layout below should look a lot like
- // the pretty-printed JSON (indent * x)
- vec!
- ( // 0x
- String("greetings".to_string()), // 1x
- Object(tree), // 1x + 2x + 2x + 1x
- ) // 0x
- // End JSON array (7 lines)
- );
-
- // Helper function for counting indents
- fn indents(source: &str) -> usize {
- let trimmed = source.trim_start_matches(' ');
- source.len() - trimmed.len()
- }
-
- // Test up to 4 spaces of indents (more?)
- for i in 0..4 {
- let mut writer = Vec::new();
- write!(&mut writer, "{}",
- super::as_pretty_json(&json).indent(i)).unwrap();
-
- let printed = from_utf8(&writer[..]).unwrap();
-
- // Check for indents at each line
- let lines: Vec<&str> = printed.lines().collect();
- assert_eq!(lines.len(), 7); // JSON should be 7 lines
-
- assert_eq!(indents(lines[0]), 0 * i); // [
- assert_eq!(indents(lines[1]), 1 * i); // "greetings",
- assert_eq!(indents(lines[2]), 1 * i); // {
- assert_eq!(indents(lines[3]), 2 * i); // "hello": "guten tag",
- assert_eq!(indents(lines[4]), 2 * i); // "goodbye": "sayonara"
- assert_eq!(indents(lines[5]), 1 * i); // },
- assert_eq!(indents(lines[6]), 0 * i); // ]
-
- // Finally, test that the pretty-printed JSON is valid
- from_str(printed).ok().expect("Pretty-printed JSON is invalid!");
- }
- }
-
- #[test]
- fn test_hashmap_with_enum_key() {
- use std::collections::HashMap;
- use json;
- #[derive(RustcEncodable, Eq, Hash, PartialEq, RustcDecodable, Debug)]
- enum Enum {
- Foo,
- #[allow(dead_code)]
- Bar,
- }
- let mut map = HashMap::new();
- map.insert(Enum::Foo, 0);
- let result = json::encode(&map).unwrap();
- assert_eq!(&result[..], r#"{"Foo":0}"#);
- let decoded: HashMap<Enum, _> = json::decode(&result).unwrap();
- assert_eq!(map, decoded);
- }
-
- #[test]
- fn test_hashmap_with_numeric_key_can_handle_double_quote_delimited_key() {
- use std::collections::HashMap;
- use Decodable;
- let json_str = "{\"1\":true}";
- let json_obj = match from_str(json_str) {
- Err(_) => panic!("Unable to parse json_str: {:?}", json_str),
- Ok(o) => o
- };
- let mut decoder = Decoder::new(json_obj);
- let _hm: HashMap<usize, bool> = Decodable::decode(&mut decoder).unwrap();
- }
-
- #[test]
- fn test_hashmap_with_numeric_key_will_error_with_string_keys() {
- use std::collections::HashMap;
- use Decodable;
- let json_str = "{\"a\":true}";
- let json_obj = match from_str(json_str) {
- Err(_) => panic!("Unable to parse json_str: {:?}", json_str),
- Ok(o) => o
- };
- let mut decoder = Decoder::new(json_obj);
- let result: Result<HashMap<usize, bool>, DecoderError> = Decodable::decode(&mut decoder);
- assert_eq!(result, Err(ExpectedError("Number".to_string(), "a".to_string())));
- }
-
- fn assert_stream_equal(src: &str,
- expected: Vec<(JsonEvent, Vec<StackElement>)>) {
- let mut parser = Parser::new(src.chars());
- let mut i = 0;
- loop {
- let evt = match parser.next() {
- Some(e) => e,
- None => { break; }
- };
- let (ref expected_evt, ref expected_stack) = expected[i];
- if !parser.stack().is_equal_to(expected_stack) {
- panic!("Parser stack is not equal to {:?}", expected_stack);
- }
- assert_eq!(&evt, expected_evt);
- i+=1;
- }
- }
- #[test]
- fn test_streaming_parser() {
- assert_stream_equal(
- r#"{ "foo":"bar", "array" : [0, 1, 2, 3, 4, 5], "idents":[null,true,false]}"#,
- vec![
- (ObjectStart, vec![]),
- (StringValue("bar".to_string()), vec![StackElement::Key("foo")]),
- (ArrayStart, vec![StackElement::Key("array")]),
- (U64Value(0), vec![StackElement::Key("array"), StackElement::Index(0)]),
- (U64Value(1), vec![StackElement::Key("array"), StackElement::Index(1)]),
- (U64Value(2), vec![StackElement::Key("array"), StackElement::Index(2)]),
- (U64Value(3), vec![StackElement::Key("array"), StackElement::Index(3)]),
- (U64Value(4), vec![StackElement::Key("array"), StackElement::Index(4)]),
- (U64Value(5), vec![StackElement::Key("array"), StackElement::Index(5)]),
- (ArrayEnd, vec![StackElement::Key("array")]),
- (ArrayStart, vec![StackElement::Key("idents")]),
- (NullValue, vec![StackElement::Key("idents"),
- StackElement::Index(0)]),
- (BooleanValue(true), vec![StackElement::Key("idents"),
- StackElement::Index(1)]),
- (BooleanValue(false), vec![StackElement::Key("idents"),
- StackElement::Index(2)]),
- (ArrayEnd, vec![StackElement::Key("idents")]),
- (ObjectEnd, vec![]),
- ]
- );
- }
- fn last_event(src: &str) -> JsonEvent {
- let mut parser = Parser::new(src.chars());
- let mut evt = NullValue;
- loop {
- evt = match parser.next() {
- Some(e) => e,
- None => return evt,
- }
- }
- }
-
- #[test]
- fn test_read_object_streaming() {
- assert_eq!(last_event("{ "), Error(SyntaxError(EOFWhileParsingObject, 1, 3)));
- assert_eq!(last_event("{1"), Error(SyntaxError(KeyMustBeAString, 1, 2)));
- assert_eq!(last_event("{ \"a\""), Error(SyntaxError(EOFWhileParsingObject, 1, 6)));
- assert_eq!(last_event("{\"a\""), Error(SyntaxError(EOFWhileParsingObject, 1, 5)));
- assert_eq!(last_event("{\"a\" "), Error(SyntaxError(EOFWhileParsingObject, 1, 6)));
-
- assert_eq!(last_event("{\"a\" 1"), Error(SyntaxError(ExpectedColon, 1, 6)));
- assert_eq!(last_event("{\"a\":"), Error(SyntaxError(EOFWhileParsingValue, 1, 6)));
- assert_eq!(last_event("{\"a\":1"), Error(SyntaxError(EOFWhileParsingObject, 1, 7)));
- assert_eq!(last_event("{\"a\":1 1"), Error(SyntaxError(InvalidSyntax, 1, 8)));
- assert_eq!(last_event("{\"a\":1,"), Error(SyntaxError(EOFWhileParsingObject, 1, 8)));
- assert_eq!(last_event("{\"a\":1,}"), Error(SyntaxError(TrailingComma, 1, 8)));
-
- assert_stream_equal(
- "{}",
- vec![(ObjectStart, vec![]), (ObjectEnd, vec![])]
- );
- assert_stream_equal(
- "{\"a\": 3}",
- vec![
- (ObjectStart, vec![]),
- (U64Value(3), vec![StackElement::Key("a")]),
- (ObjectEnd, vec![]),
- ]
- );
- assert_stream_equal(
- "{ \"a\": null, \"b\" : true }",
- vec![
- (ObjectStart, vec![]),
- (NullValue, vec![StackElement::Key("a")]),
- (BooleanValue(true), vec![StackElement::Key("b")]),
- (ObjectEnd, vec![]),
- ]
- );
- assert_stream_equal(
- "{\"a\" : 1.0 ,\"b\": [ true ]}",
- vec![
- (ObjectStart, vec![]),
- (F64Value(1.0), vec![StackElement::Key("a")]),
- (ArrayStart, vec![StackElement::Key("b")]),
- (BooleanValue(true),vec![StackElement::Key("b"), StackElement::Index(0)]),
- (ArrayEnd, vec![StackElement::Key("b")]),
- (ObjectEnd, vec![]),
- ]
- );
- assert_stream_equal(
- r#"{
- "a": 1.0,
- "b": [
- true,
- "foo\nbar",
- { "c": {"d": null} }
- ]
- }"#,
- vec![
- (ObjectStart, vec![]),
- (F64Value(1.0), vec![StackElement::Key("a")]),
- (ArrayStart, vec![StackElement::Key("b")]),
- (BooleanValue(true), vec![StackElement::Key("b"),
- StackElement::Index(0)]),
- (StringValue("foo\nbar".to_string()), vec![StackElement::Key("b"),
- StackElement::Index(1)]),
- (ObjectStart, vec![StackElement::Key("b"),
- StackElement::Index(2)]),
- (ObjectStart, vec![StackElement::Key("b"),
- StackElement::Index(2),
- StackElement::Key("c")]),
- (NullValue, vec![StackElement::Key("b"),
- StackElement::Index(2),
- StackElement::Key("c"),
- StackElement::Key("d")]),
- (ObjectEnd, vec![StackElement::Key("b"),
- StackElement::Index(2),
- StackElement::Key("c")]),
- (ObjectEnd, vec![StackElement::Key("b"),
- StackElement::Index(2)]),
- (ArrayEnd, vec![StackElement::Key("b")]),
- (ObjectEnd, vec![]),
- ]
- );
- }
- #[test]
- fn test_read_array_streaming() {
- assert_stream_equal(
- "[]",
- vec![
- (ArrayStart, vec![]),
- (ArrayEnd, vec![]),
- ]
- );
- assert_stream_equal(
- "[ ]",
- vec![
- (ArrayStart, vec![]),
- (ArrayEnd, vec![]),
- ]
- );
- assert_stream_equal(
- "[true]",
- vec![
- (ArrayStart, vec![]),
- (BooleanValue(true), vec![StackElement::Index(0)]),
- (ArrayEnd, vec![]),
- ]
- );
- assert_stream_equal(
- "[ false ]",
- vec![
- (ArrayStart, vec![]),
- (BooleanValue(false), vec![StackElement::Index(0)]),
- (ArrayEnd, vec![]),
- ]
- );
- assert_stream_equal(
- "[null]",
- vec![
- (ArrayStart, vec![]),
- (NullValue, vec![StackElement::Index(0)]),
- (ArrayEnd, vec![]),
- ]
- );
- assert_stream_equal(
- "[3, 1]",
- vec![
- (ArrayStart, vec![]),
- (U64Value(3), vec![StackElement::Index(0)]),
- (U64Value(1), vec![StackElement::Index(1)]),
- (ArrayEnd, vec![]),
- ]
- );
- assert_stream_equal(
- "\n[3, 2]\n",
- vec![
- (ArrayStart, vec![]),
- (U64Value(3), vec![StackElement::Index(0)]),
- (U64Value(2), vec![StackElement::Index(1)]),
- (ArrayEnd, vec![]),
- ]
- );
- assert_stream_equal(
- "[2, [4, 1]]",
- vec![
- (ArrayStart, vec![]),
- (U64Value(2), vec![StackElement::Index(0)]),
- (ArrayStart, vec![StackElement::Index(1)]),
- (U64Value(4), vec![StackElement::Index(1), StackElement::Index(0)]),
- (U64Value(1), vec![StackElement::Index(1), StackElement::Index(1)]),
- (ArrayEnd, vec![StackElement::Index(1)]),
- (ArrayEnd, vec![]),
- ]
- );
-
- assert_eq!(last_event("["), Error(SyntaxError(EOFWhileParsingValue, 1, 2)));
-
- assert_eq!(from_str("["), Err(SyntaxError(EOFWhileParsingValue, 1, 2)));
- assert_eq!(from_str("[1"), Err(SyntaxError(EOFWhileParsingArray, 1, 3)));
- assert_eq!(from_str("[1,"), Err(SyntaxError(EOFWhileParsingValue, 1, 4)));
- assert_eq!(from_str("[1,]"), Err(SyntaxError(InvalidSyntax, 1, 4)));
- assert_eq!(from_str("[6 7]"), Err(SyntaxError(InvalidSyntax, 1, 4)));
-
- }
- #[test]
- fn test_trailing_characters_streaming() {
- assert_eq!(last_event("nulla"), Error(SyntaxError(TrailingCharacters, 1, 5)));
- assert_eq!(last_event("truea"), Error(SyntaxError(TrailingCharacters, 1, 5)));
- assert_eq!(last_event("falsea"), Error(SyntaxError(TrailingCharacters, 1, 6)));
- assert_eq!(last_event("1a"), Error(SyntaxError(TrailingCharacters, 1, 2)));
- assert_eq!(last_event("[]a"), Error(SyntaxError(TrailingCharacters, 1, 3)));
- assert_eq!(last_event("{}a"), Error(SyntaxError(TrailingCharacters, 1, 3)));
- }
- #[test]
- fn test_read_identifiers_streaming() {
- assert_eq!(Parser::new("null".chars()).next(), Some(NullValue));
- assert_eq!(Parser::new("true".chars()).next(), Some(BooleanValue(true)));
- assert_eq!(Parser::new("false".chars()).next(), Some(BooleanValue(false)));
-
- assert_eq!(last_event("n"), Error(SyntaxError(InvalidSyntax, 1, 2)));
- assert_eq!(last_event("nul"), Error(SyntaxError(InvalidSyntax, 1, 4)));
- assert_eq!(last_event("t"), Error(SyntaxError(InvalidSyntax, 1, 2)));
- assert_eq!(last_event("truz"), Error(SyntaxError(InvalidSyntax, 1, 4)));
- assert_eq!(last_event("f"), Error(SyntaxError(InvalidSyntax, 1, 2)));
- assert_eq!(last_event("faz"), Error(SyntaxError(InvalidSyntax, 1, 3)));
- }
-
#[test]
fn test_stack() {
let mut stack = Stack::new();
assert!(stack.get(1) == StackElement::Key("foo"));
}
- #[test]
- fn test_to_json() {
- use std::collections::{HashMap,BTreeMap};
- use super::ToJson;
-
- let array2 = Array(vec![U64(1), U64(2)]);
- let array3 = Array(vec![U64(1), U64(2), U64(3)]);
- let object = {
- let mut tree_map = BTreeMap::new();
- tree_map.insert("a".to_string(), U64(1));
- tree_map.insert("b".to_string(), U64(2));
- Object(tree_map)
- };
-
- assert_eq!(array2.to_json(), array2);
- assert_eq!(object.to_json(), object);
- assert_eq!(3_isize.to_json(), I64(3));
- assert_eq!(4_i8.to_json(), I64(4));
- assert_eq!(5_i16.to_json(), I64(5));
- assert_eq!(6_i32.to_json(), I64(6));
- assert_eq!(7_i64.to_json(), I64(7));
- assert_eq!(8_usize.to_json(), U64(8));
- assert_eq!(9_u8.to_json(), U64(9));
- assert_eq!(10_u16.to_json(), U64(10));
- assert_eq!(11_u32.to_json(), U64(11));
- assert_eq!(12_u64.to_json(), U64(12));
- assert_eq!(13.0_f32.to_json(), F64(13.0_f64));
- assert_eq!(14.0_f64.to_json(), F64(14.0_f64));
- assert_eq!(().to_json(), Null);
- assert_eq!(f32::INFINITY.to_json(), Null);
- assert_eq!(f64::NAN.to_json(), Null);
- assert_eq!(true.to_json(), Boolean(true));
- assert_eq!(false.to_json(), Boolean(false));
- assert_eq!("abc".to_json(), String("abc".to_string()));
- assert_eq!("abc".to_string().to_json(), String("abc".to_string()));
- assert_eq!((1_usize, 2_usize).to_json(), array2);
- assert_eq!((1_usize, 2_usize, 3_usize).to_json(), array3);
- assert_eq!([1_usize, 2_usize].to_json(), array2);
- assert_eq!((&[1_usize, 2_usize, 3_usize]).to_json(), array3);
- assert_eq!((vec![1_usize, 2_usize]).to_json(), array2);
- assert_eq!(vec![1_usize, 2_usize, 3_usize].to_json(), array3);
- let mut tree_map = BTreeMap::new();
- tree_map.insert("a".to_string(), 1 as usize);
- tree_map.insert("b".to_string(), 2);
- assert_eq!(tree_map.to_json(), object);
- let mut hash_map = HashMap::new();
- hash_map.insert("a".to_string(), 1 as usize);
- hash_map.insert("b".to_string(), 2);
- assert_eq!(hash_map.to_json(), object);
- assert_eq!(Some(15).to_json(), I64(15));
- assert_eq!(Some(15 as usize).to_json(), U64(15));
- assert_eq!(None::<isize>.to_json(), Null);
- }
-
- #[test]
- fn test_encode_hashmap_with_arbitrary_key() {
- use std::collections::HashMap;
- #[derive(PartialEq, Eq, Hash, RustcEncodable)]
- struct ArbitraryType(usize);
- let mut hm: HashMap<ArbitraryType, bool> = HashMap::new();
- hm.insert(ArbitraryType(1), true);
- let mut mem_buf = string::String::new();
- let mut encoder = Encoder::new(&mut mem_buf);
- let result = hm.encode(&mut encoder);
- match result.unwrap_err() {
- EncoderError::BadHashmapKey => (),
- _ => panic!("expected bad hash map key")
- }
- }
-
#[bench]
fn bench_streaming_small(b: &mut Bencher) {
b.iter( || {
html_playground_url = "https://play.rust-lang.org/",
test(attr(allow(unused_variables), deny(warnings))))]
+#![deny(rust_2018_idioms)]
+
#![feature(box_syntax)]
#![feature(core_intrinsics)]
#![feature(specialization)]
pub use self::serialize::{SpecializationError, SpecializedEncoder, SpecializedDecoder};
pub use self::serialize::{UseSpecializedEncodable, UseSpecializedDecodable};
-extern crate smallvec;
-
mod serialize;
mod collection_impls;
pub mod opaque;
pub mod leb128;
-
-mod rustc_serialize {
- pub use serialize::*;
-}
-use leb128::{self, read_signed_leb128, write_signed_leb128};
+use crate::leb128::{self, read_signed_leb128, write_signed_leb128};
+use crate::serialize;
use std::borrow::Cow;
-use serialize;
// -----------------------------------------------------------------------------
// Encoder
}
#[inline]
- fn read_str(&mut self) -> Result<Cow<str>, Self::Error> {
+ fn read_str(&mut self) -> Result<Cow<'_, str>, Self::Error> {
let len = self.read_usize()?;
let s = ::std::str::from_utf8(&self.data[self.position..self.position + len]).unwrap();
self.position += len;
err.to_string()
}
}
-
-
-#[cfg(test)]
-mod tests {
- use serialize::{Encodable, Decodable};
- use std::fmt::Debug;
- use super::{Encoder, Decoder};
-
- #[derive(PartialEq, Clone, Debug, RustcEncodable, RustcDecodable)]
- struct Struct {
- a: (),
- b: u8,
- c: u16,
- d: u32,
- e: u64,
- f: usize,
-
- g: i8,
- h: i16,
- i: i32,
- j: i64,
- k: isize,
-
- l: char,
- m: String,
- n: f32,
- o: f64,
- p: bool,
- q: Option<u32>,
- }
-
-
- fn check_round_trip<T: Encodable + Decodable + PartialEq + Debug>(values: Vec<T>) {
- let mut encoder = Encoder::new(Vec::new());
-
- for value in &values {
- Encodable::encode(&value, &mut encoder).unwrap();
- }
-
- let data = encoder.into_inner();
- let mut decoder = Decoder::new(&data[..], 0);
-
- for value in values {
- let decoded = Decodable::decode(&mut decoder).unwrap();
- assert_eq!(value, decoded);
- }
- }
-
- #[test]
- fn test_unit() {
- check_round_trip(vec![(), (), (), ()]);
- }
-
- #[test]
- fn test_u8() {
- let mut vec = vec![];
- for i in ::std::u8::MIN..::std::u8::MAX {
- vec.push(i);
- }
- check_round_trip(vec);
- }
-
- #[test]
- fn test_u16() {
- for i in ::std::u16::MIN..::std::u16::MAX {
- check_round_trip(vec![1, 2, 3, i, i, i]);
- }
- }
-
- #[test]
- fn test_u32() {
- check_round_trip(vec![1, 2, 3, ::std::u32::MIN, 0, 1, ::std::u32::MAX, 2, 1]);
- }
-
- #[test]
- fn test_u64() {
- check_round_trip(vec![1, 2, 3, ::std::u64::MIN, 0, 1, ::std::u64::MAX, 2, 1]);
- }
-
- #[test]
- fn test_usize() {
- check_round_trip(vec![1, 2, 3, ::std::usize::MIN, 0, 1, ::std::usize::MAX, 2, 1]);
- }
-
- #[test]
- fn test_i8() {
- let mut vec = vec![];
- for i in ::std::i8::MIN..::std::i8::MAX {
- vec.push(i);
- }
- check_round_trip(vec);
- }
-
- #[test]
- fn test_i16() {
- for i in ::std::i16::MIN..::std::i16::MAX {
- check_round_trip(vec![-1, 2, -3, i, i, i, 2]);
- }
- }
-
- #[test]
- fn test_i32() {
- check_round_trip(vec![-1, 2, -3, ::std::i32::MIN, 0, 1, ::std::i32::MAX, 2, 1]);
- }
-
- #[test]
- fn test_i64() {
- check_round_trip(vec![-1, 2, -3, ::std::i64::MIN, 0, 1, ::std::i64::MAX, 2, 1]);
- }
-
- #[test]
- fn test_isize() {
- check_round_trip(vec![-1, 2, -3, ::std::isize::MIN, 0, 1, ::std::isize::MAX, 2, 1]);
- }
-
- #[test]
- fn test_bool() {
- check_round_trip(vec![false, true, true, false, false]);
- }
-
- #[test]
- fn test_f32() {
- let mut vec = vec![];
- for i in -100..100 {
- vec.push((i as f32) / 3.0);
- }
- check_round_trip(vec);
- }
-
- #[test]
- fn test_f64() {
- let mut vec = vec![];
- for i in -100..100 {
- vec.push((i as f64) / 3.0);
- }
- check_round_trip(vec);
- }
-
- #[test]
- fn test_char() {
- let vec = vec!['a', 'b', 'c', 'd', 'A', 'X', ' ', '#', 'Ö', 'Ä', 'µ', '€'];
- check_round_trip(vec);
- }
-
- #[test]
- fn test_string() {
- let vec = vec!["abcbuÖeiovÄnameÜavmpßvmea€µsbpnvapeapmaebn".to_string(),
- "abcbuÖganeiovÄnameÜavmpßvmea€µsbpnvapeapmaebn".to_string(),
- "abcbuÖganeiovÄnameÜavmpßvmea€µsbpapmaebn".to_string(),
- "abcbuÖganeiovÄnameÜavmpßvmeabpnvapeapmaebn".to_string(),
- "abcbuÖganeiÄnameÜavmpßvmea€µsbpnvapeapmaebn".to_string(),
- "abcbuÖganeiovÄnameÜavmpßvmea€µsbpmaebn".to_string(),
- "abcbuÖganeiovÄnameÜavmpßvmea€µnvapeapmaebn".to_string()];
-
- check_round_trip(vec);
- }
-
- #[test]
- fn test_option() {
- check_round_trip(vec![Some(-1i8)]);
- check_round_trip(vec![Some(-2i16)]);
- check_round_trip(vec![Some(-3i32)]);
- check_round_trip(vec![Some(-4i64)]);
- check_round_trip(vec![Some(-5isize)]);
-
- let none_i8: Option<i8> = None;
- check_round_trip(vec![none_i8]);
-
- let none_i16: Option<i16> = None;
- check_round_trip(vec![none_i16]);
-
- let none_i32: Option<i32> = None;
- check_round_trip(vec![none_i32]);
-
- let none_i64: Option<i64> = None;
- check_round_trip(vec![none_i64]);
-
- let none_isize: Option<isize> = None;
- check_round_trip(vec![none_isize]);
- }
-
- #[test]
- fn test_struct() {
- check_round_trip(vec![Struct {
- a: (),
- b: 10,
- c: 11,
- d: 12,
- e: 13,
- f: 14,
-
- g: 15,
- h: 16,
- i: 17,
- j: 18,
- k: 19,
-
- l: 'x',
- m: "abc".to_string(),
- n: 20.5,
- o: 21.5,
- p: false,
- q: None,
- }]);
-
- check_round_trip(vec![Struct {
- a: (),
- b: 101,
- c: 111,
- d: 121,
- e: 131,
- f: 141,
-
- g: -15,
- h: -16,
- i: -17,
- j: -18,
- k: -19,
-
- l: 'y',
- m: "def".to_string(),
- n: -20.5,
- o: -21.5,
- p: true,
- q: Some(1234567),
- }]);
- }
-
- #[derive(PartialEq, Clone, Debug, RustcEncodable, RustcDecodable)]
- enum Enum {
- Variant1,
- Variant2(usize, f32),
- Variant3 {
- a: i32,
- b: char,
- c: bool,
- },
- }
-
- #[test]
- fn test_enum() {
- check_round_trip(vec![Enum::Variant1,
- Enum::Variant2(1, 2.5),
- Enum::Variant3 {
- a: 3,
- b: 'b',
- c: false,
- },
- Enum::Variant3 {
- a: -4,
- b: 'f',
- c: true,
- }]);
- }
-
- #[test]
- fn test_sequence() {
- let mut vec = vec![];
- for i in -100i64..100i64 {
- vec.push(i * 100000);
- }
-
- check_round_trip(vec![vec]);
- }
-
- #[test]
- fn test_hash_map() {
- use std::collections::HashMap;
- let mut map = HashMap::new();
- for i in -100i64..100i64 {
- map.insert(i * 100000, i * 10000);
- }
-
- check_round_trip(vec![map]);
- }
-
- #[test]
- fn test_tuples() {
- check_round_trip(vec![('x', (), false, 0.5f32)]);
- check_round_trip(vec![(9i8, 10u16, 1.5f64)]);
- check_round_trip(vec![(-12i16, 11u8, 12usize)]);
- check_round_trip(vec![(1234567isize, 100000000000000u64, 99999999999999i64)]);
- check_round_trip(vec![(String::new(), "some string".to_string())]);
- }
-}
fn read_f64(&mut self) -> Result<f64, Self::Error>;
fn read_f32(&mut self) -> Result<f32, Self::Error>;
fn read_char(&mut self) -> Result<char, Self::Error>;
- fn read_str(&mut self) -> Result<Cow<str>, Self::Error>;
+ fn read_str(&mut self) -> Result<Cow<'_, str>, Self::Error>;
// Compound types:
fn read_enum<T, F>(&mut self, _name: &str, f: F) -> Result<T, Self::Error>
--- /dev/null
+extern crate serialize as rustc_serialize;
+
+use rustc_serialize::{Encodable, Decodable};
+use rustc_serialize::json;
+use json::Json::*;
+use json::ErrorCode::*;
+use json::ParserError::*;
+use json::DecoderError::*;
+use json::JsonEvent::*;
+use json::{Json, from_str, DecodeResult, DecoderError, JsonEvent, Parser, StackElement,
+ Decoder, Encoder, EncoderError};
+
+use Animal::*;
+use std::{i64, u64, f32, f64};
+use std::io::prelude::*;
+use std::collections::BTreeMap;
+use std::string;
+
+#[derive(RustcDecodable, Eq, PartialEq, Debug)]
+struct OptionData {
+ opt: Option<usize>,
+}
+
+#[test]
+fn test_decode_option_none() {
+ let s ="{}";
+ let obj: OptionData = json::decode(s).unwrap();
+ assert_eq!(obj, OptionData { opt: None });
+}
+
+#[test]
+fn test_decode_option_some() {
+ let s = "{ \"opt\": 10 }";
+ let obj: OptionData = json::decode(s).unwrap();
+ assert_eq!(obj, OptionData { opt: Some(10) });
+}
+
+#[test]
+fn test_decode_option_malformed() {
+ check_err::<OptionData>("{ \"opt\": [] }",
+ ExpectedError("Number".to_string(), "[]".to_string()));
+ check_err::<OptionData>("{ \"opt\": false }",
+ ExpectedError("Number".to_string(), "false".to_string()));
+}
+
+#[derive(PartialEq, RustcEncodable, RustcDecodable, Debug)]
+enum Animal {
+ Dog,
+ Frog(string::String, isize)
+}
+
+#[derive(PartialEq, RustcEncodable, RustcDecodable, Debug)]
+struct Inner {
+ a: (),
+ b: usize,
+ c: Vec<string::String>,
+}
+
+#[derive(PartialEq, RustcEncodable, RustcDecodable, Debug)]
+struct Outer {
+ inner: Vec<Inner>,
+}
+
+fn mk_object(items: &[(string::String, Json)]) -> Json {
+ let mut d = BTreeMap::new();
+
+ for item in items {
+ match *item {
+ (ref key, ref value) => { d.insert((*key).clone(), (*value).clone()); },
+ }
+ };
+
+ Object(d)
+}
+
+#[test]
+fn test_from_str_trait() {
+ let s = "null";
+ assert!(s.parse::<Json>().unwrap() == s.parse().unwrap());
+}
+
+#[test]
+fn test_write_null() {
+ assert_eq!(Null.to_string(), "null");
+ assert_eq!(Null.pretty().to_string(), "null");
+}
+
+#[test]
+fn test_write_i64() {
+ assert_eq!(U64(0).to_string(), "0");
+ assert_eq!(U64(0).pretty().to_string(), "0");
+
+ assert_eq!(U64(1234).to_string(), "1234");
+ assert_eq!(U64(1234).pretty().to_string(), "1234");
+
+ assert_eq!(I64(-5678).to_string(), "-5678");
+ assert_eq!(I64(-5678).pretty().to_string(), "-5678");
+
+ assert_eq!(U64(7650007200025252000).to_string(), "7650007200025252000");
+ assert_eq!(U64(7650007200025252000).pretty().to_string(), "7650007200025252000");
+}
+
+#[test]
+fn test_write_f64() {
+ assert_eq!(F64(3.0).to_string(), "3.0");
+ assert_eq!(F64(3.0).pretty().to_string(), "3.0");
+
+ assert_eq!(F64(3.1).to_string(), "3.1");
+ assert_eq!(F64(3.1).pretty().to_string(), "3.1");
+
+ assert_eq!(F64(-1.5).to_string(), "-1.5");
+ assert_eq!(F64(-1.5).pretty().to_string(), "-1.5");
+
+ assert_eq!(F64(0.5).to_string(), "0.5");
+ assert_eq!(F64(0.5).pretty().to_string(), "0.5");
+
+ assert_eq!(F64(f64::NAN).to_string(), "null");
+ assert_eq!(F64(f64::NAN).pretty().to_string(), "null");
+
+ assert_eq!(F64(f64::INFINITY).to_string(), "null");
+ assert_eq!(F64(f64::INFINITY).pretty().to_string(), "null");
+
+ assert_eq!(F64(f64::NEG_INFINITY).to_string(), "null");
+ assert_eq!(F64(f64::NEG_INFINITY).pretty().to_string(), "null");
+}
+
+#[test]
+fn test_write_str() {
+ assert_eq!(String("".to_string()).to_string(), "\"\"");
+ assert_eq!(String("".to_string()).pretty().to_string(), "\"\"");
+
+ assert_eq!(String("homura".to_string()).to_string(), "\"homura\"");
+ assert_eq!(String("madoka".to_string()).pretty().to_string(), "\"madoka\"");
+}
+
+#[test]
+fn test_write_bool() {
+ assert_eq!(Boolean(true).to_string(), "true");
+ assert_eq!(Boolean(true).pretty().to_string(), "true");
+
+ assert_eq!(Boolean(false).to_string(), "false");
+ assert_eq!(Boolean(false).pretty().to_string(), "false");
+}
+
+#[test]
+fn test_write_array() {
+ assert_eq!(Array(vec![]).to_string(), "[]");
+ assert_eq!(Array(vec![]).pretty().to_string(), "[]");
+
+ assert_eq!(Array(vec![Boolean(true)]).to_string(), "[true]");
+ assert_eq!(
+ Array(vec![Boolean(true)]).pretty().to_string(),
+ "\
+ [\n \
+ true\n\
+ ]"
+ );
+
+ let long_test_array = Array(vec![
+ Boolean(false),
+ Null,
+ Array(vec![String("foo\nbar".to_string()), F64(3.5)])]);
+
+ assert_eq!(long_test_array.to_string(),
+ "[false,null,[\"foo\\nbar\",3.5]]");
+ assert_eq!(
+ long_test_array.pretty().to_string(),
+ "\
+ [\n \
+ false,\n \
+ null,\n \
+ [\n \
+ \"foo\\nbar\",\n \
+ 3.5\n \
+ ]\n\
+ ]"
+ );
+}
+
+#[test]
+fn test_write_object() {
+ assert_eq!(mk_object(&[]).to_string(), "{}");
+ assert_eq!(mk_object(&[]).pretty().to_string(), "{}");
+
+ assert_eq!(
+ mk_object(&[
+ ("a".to_string(), Boolean(true))
+ ]).to_string(),
+ "{\"a\":true}"
+ );
+ assert_eq!(
+ mk_object(&[("a".to_string(), Boolean(true))]).pretty().to_string(),
+ "\
+ {\n \
+ \"a\": true\n\
+ }"
+ );
+
+ let complex_obj = mk_object(&[
+ ("b".to_string(), Array(vec![
+ mk_object(&[("c".to_string(), String("\x0c\r".to_string()))]),
+ mk_object(&[("d".to_string(), String("".to_string()))])
+ ]))
+ ]);
+
+ assert_eq!(
+ complex_obj.to_string(),
+ "{\
+ \"b\":[\
+ {\"c\":\"\\f\\r\"},\
+ {\"d\":\"\"}\
+ ]\
+ }"
+ );
+ assert_eq!(
+ complex_obj.pretty().to_string(),
+ "\
+ {\n \
+ \"b\": [\n \
+ {\n \
+ \"c\": \"\\f\\r\"\n \
+ },\n \
+ {\n \
+ \"d\": \"\"\n \
+ }\n \
+ ]\n\
+ }"
+ );
+
+ let a = mk_object(&[
+ ("a".to_string(), Boolean(true)),
+ ("b".to_string(), Array(vec![
+ mk_object(&[("c".to_string(), String("\x0c\r".to_string()))]),
+ mk_object(&[("d".to_string(), String("".to_string()))])
+ ]))
+ ]);
+
+    // We can't compare the strings directly because the object fields may be
+    // printed in a different order.
+ assert_eq!(a.clone(), a.to_string().parse().unwrap());
+ assert_eq!(a.clone(), a.pretty().to_string().parse().unwrap());
+}
+
+#[test]
+fn test_write_enum() {
+ let animal = Dog;
+ assert_eq!(
+ json::as_json(&animal).to_string(),
+ "\"Dog\""
+ );
+ assert_eq!(
+ json::as_pretty_json(&animal).to_string(),
+ "\"Dog\""
+ );
+
+ let animal = Frog("Henry".to_string(), 349);
+ assert_eq!(
+ json::as_json(&animal).to_string(),
+ "{\"variant\":\"Frog\",\"fields\":[\"Henry\",349]}"
+ );
+ assert_eq!(
+ json::as_pretty_json(&animal).to_string(),
+ "{\n \
+ \"variant\": \"Frog\",\n \
+ \"fields\": [\n \
+ \"Henry\",\n \
+ 349\n \
+ ]\n\
+ }"
+ );
+}
+
+macro_rules! check_encoder_for_simple {
+ ($value:expr, $expected:expr) => ({
+ let s = json::as_json(&$value).to_string();
+ assert_eq!(s, $expected);
+
+ let s = json::as_pretty_json(&$value).to_string();
+ assert_eq!(s, $expected);
+ })
+}
+
+#[test]
+fn test_write_some() {
+ check_encoder_for_simple!(Some("jodhpurs".to_string()), "\"jodhpurs\"");
+}
+
+#[test]
+fn test_write_none() {
+ check_encoder_for_simple!(None::<string::String>, "null");
+}
+
+#[test]
+fn test_write_char() {
+ check_encoder_for_simple!('a', "\"a\"");
+ check_encoder_for_simple!('\t', "\"\\t\"");
+ check_encoder_for_simple!('\u{0000}', "\"\\u0000\"");
+ check_encoder_for_simple!('\u{001b}', "\"\\u001b\"");
+ check_encoder_for_simple!('\u{007f}', "\"\\u007f\"");
+ check_encoder_for_simple!('\u{00a0}', "\"\u{00a0}\"");
+ check_encoder_for_simple!('\u{abcd}', "\"\u{abcd}\"");
+ check_encoder_for_simple!('\u{10ffff}', "\"\u{10ffff}\"");
+}
+
+#[test]
+fn test_trailing_characters() {
+ assert_eq!(from_str("nulla"), Err(SyntaxError(TrailingCharacters, 1, 5)));
+ assert_eq!(from_str("truea"), Err(SyntaxError(TrailingCharacters, 1, 5)));
+ assert_eq!(from_str("falsea"), Err(SyntaxError(TrailingCharacters, 1, 6)));
+ assert_eq!(from_str("1a"), Err(SyntaxError(TrailingCharacters, 1, 2)));
+ assert_eq!(from_str("[]a"), Err(SyntaxError(TrailingCharacters, 1, 3)));
+ assert_eq!(from_str("{}a"), Err(SyntaxError(TrailingCharacters, 1, 3)));
+}
+
+#[test]
+fn test_read_identifiers() {
+ assert_eq!(from_str("n"), Err(SyntaxError(InvalidSyntax, 1, 2)));
+ assert_eq!(from_str("nul"), Err(SyntaxError(InvalidSyntax, 1, 4)));
+ assert_eq!(from_str("t"), Err(SyntaxError(InvalidSyntax, 1, 2)));
+ assert_eq!(from_str("truz"), Err(SyntaxError(InvalidSyntax, 1, 4)));
+ assert_eq!(from_str("f"), Err(SyntaxError(InvalidSyntax, 1, 2)));
+ assert_eq!(from_str("faz"), Err(SyntaxError(InvalidSyntax, 1, 3)));
+
+ assert_eq!(from_str("null"), Ok(Null));
+ assert_eq!(from_str("true"), Ok(Boolean(true)));
+ assert_eq!(from_str("false"), Ok(Boolean(false)));
+ assert_eq!(from_str(" null "), Ok(Null));
+ assert_eq!(from_str(" true "), Ok(Boolean(true)));
+ assert_eq!(from_str(" false "), Ok(Boolean(false)));
+}
+
+#[test]
+fn test_decode_identifiers() {
+ let v: () = json::decode("null").unwrap();
+ assert_eq!(v, ());
+
+ let v: bool = json::decode("true").unwrap();
+ assert_eq!(v, true);
+
+ let v: bool = json::decode("false").unwrap();
+ assert_eq!(v, false);
+}
+
+#[test]
+fn test_read_number() {
+ assert_eq!(from_str("+"), Err(SyntaxError(InvalidSyntax, 1, 1)));
+ assert_eq!(from_str("."), Err(SyntaxError(InvalidSyntax, 1, 1)));
+ assert_eq!(from_str("NaN"), Err(SyntaxError(InvalidSyntax, 1, 1)));
+ assert_eq!(from_str("-"), Err(SyntaxError(InvalidNumber, 1, 2)));
+ assert_eq!(from_str("00"), Err(SyntaxError(InvalidNumber, 1, 2)));
+ assert_eq!(from_str("1."), Err(SyntaxError(InvalidNumber, 1, 3)));
+ assert_eq!(from_str("1e"), Err(SyntaxError(InvalidNumber, 1, 3)));
+ assert_eq!(from_str("1e+"), Err(SyntaxError(InvalidNumber, 1, 4)));
+
+ assert_eq!(from_str("18446744073709551616"), Err(SyntaxError(InvalidNumber, 1, 20)));
+ assert_eq!(from_str("-9223372036854775809"), Err(SyntaxError(InvalidNumber, 1, 21)));
+
+ assert_eq!(from_str("3"), Ok(U64(3)));
+ assert_eq!(from_str("3.1"), Ok(F64(3.1)));
+ assert_eq!(from_str("-1.2"), Ok(F64(-1.2)));
+ assert_eq!(from_str("0.4"), Ok(F64(0.4)));
+ assert_eq!(from_str("0.4e5"), Ok(F64(0.4e5)));
+ assert_eq!(from_str("0.4e+15"), Ok(F64(0.4e15)));
+ assert_eq!(from_str("0.4e-01"), Ok(F64(0.4e-01)));
+ assert_eq!(from_str(" 3 "), Ok(U64(3)));
+
+ assert_eq!(from_str("-9223372036854775808"), Ok(I64(i64::MIN)));
+ assert_eq!(from_str("9223372036854775807"), Ok(U64(i64::MAX as u64)));
+ assert_eq!(from_str("18446744073709551615"), Ok(U64(u64::MAX)));
+}
+
+#[test]
+fn test_decode_numbers() {
+ let v: f64 = json::decode("3").unwrap();
+ assert_eq!(v, 3.0);
+
+ let v: f64 = json::decode("3.1").unwrap();
+ assert_eq!(v, 3.1);
+
+ let v: f64 = json::decode("-1.2").unwrap();
+ assert_eq!(v, -1.2);
+
+ let v: f64 = json::decode("0.4").unwrap();
+ assert_eq!(v, 0.4);
+
+ let v: f64 = json::decode("0.4e5").unwrap();
+ assert_eq!(v, 0.4e5);
+
+ let v: f64 = json::decode("0.4e15").unwrap();
+ assert_eq!(v, 0.4e15);
+
+ let v: f64 = json::decode("0.4e-01").unwrap();
+ assert_eq!(v, 0.4e-01);
+
+ let v: u64 = json::decode("0").unwrap();
+ assert_eq!(v, 0);
+
+ let v: u64 = json::decode("18446744073709551615").unwrap();
+ assert_eq!(v, u64::MAX);
+
+ let v: i64 = json::decode("-9223372036854775808").unwrap();
+ assert_eq!(v, i64::MIN);
+
+ let v: i64 = json::decode("9223372036854775807").unwrap();
+ assert_eq!(v, i64::MAX);
+
+ let res: DecodeResult<i64> = json::decode("765.25");
+ assert_eq!(res, Err(ExpectedError("Integer".to_string(),
+ "765.25".to_string())));
+}
+
+#[test]
+fn test_read_str() {
+ assert_eq!(from_str("\""), Err(SyntaxError(EOFWhileParsingString, 1, 2)));
+ assert_eq!(from_str("\"lol"), Err(SyntaxError(EOFWhileParsingString, 1, 5)));
+
+ assert_eq!(from_str("\"\""), Ok(String("".to_string())));
+ assert_eq!(from_str("\"foo\""), Ok(String("foo".to_string())));
+ assert_eq!(from_str("\"\\\"\""), Ok(String("\"".to_string())));
+ assert_eq!(from_str("\"\\b\""), Ok(String("\x08".to_string())));
+ assert_eq!(from_str("\"\\n\""), Ok(String("\n".to_string())));
+ assert_eq!(from_str("\"\\r\""), Ok(String("\r".to_string())));
+ assert_eq!(from_str("\"\\t\""), Ok(String("\t".to_string())));
+ assert_eq!(from_str(" \"foo\" "), Ok(String("foo".to_string())));
+ assert_eq!(from_str("\"\\u12ab\""), Ok(String("\u{12ab}".to_string())));
+ assert_eq!(from_str("\"\\uAB12\""), Ok(String("\u{AB12}".to_string())));
+}
+
+#[test]
+fn test_decode_str() {
+ let s = [("\"\"", ""),
+ ("\"foo\"", "foo"),
+ ("\"\\\"\"", "\""),
+ ("\"\\b\"", "\x08"),
+ ("\"\\n\"", "\n"),
+ ("\"\\r\"", "\r"),
+ ("\"\\t\"", "\t"),
+ ("\"\\u12ab\"", "\u{12ab}"),
+ ("\"\\uAB12\"", "\u{AB12}")];
+
+ for &(i, o) in &s {
+ let v: string::String = json::decode(i).unwrap();
+ assert_eq!(v, o);
+ }
+}
+
+#[test]
+fn test_read_array() {
+ assert_eq!(from_str("["), Err(SyntaxError(EOFWhileParsingValue, 1, 2)));
+ assert_eq!(from_str("[1"), Err(SyntaxError(EOFWhileParsingArray, 1, 3)));
+ assert_eq!(from_str("[1,"), Err(SyntaxError(EOFWhileParsingValue, 1, 4)));
+ assert_eq!(from_str("[1,]"), Err(SyntaxError(InvalidSyntax, 1, 4)));
+ assert_eq!(from_str("[6 7]"), Err(SyntaxError(InvalidSyntax, 1, 4)));
+
+ assert_eq!(from_str("[]"), Ok(Array(vec![])));
+ assert_eq!(from_str("[ ]"), Ok(Array(vec![])));
+ assert_eq!(from_str("[true]"), Ok(Array(vec![Boolean(true)])));
+ assert_eq!(from_str("[ false ]"), Ok(Array(vec![Boolean(false)])));
+ assert_eq!(from_str("[null]"), Ok(Array(vec![Null])));
+ assert_eq!(from_str("[3, 1]"),
+ Ok(Array(vec![U64(3), U64(1)])));
+ assert_eq!(from_str("\n[3, 2]\n"),
+ Ok(Array(vec![U64(3), U64(2)])));
+ assert_eq!(from_str("[2, [4, 1]]"),
+ Ok(Array(vec![U64(2), Array(vec![U64(4), U64(1)])])));
+}
+
#[test]
fn test_decode_array() {
    // Decoding straight into Vec<T>; JSON `null` decodes into the unit type.
    let v: Vec<()> = json::decode("[]").unwrap();
    assert_eq!(v, []);

    let v: Vec<()> = json::decode("[null]").unwrap();
    assert_eq!(v, [()]);

    let v: Vec<bool> = json::decode("[true]").unwrap();
    assert_eq!(v, [true]);

    let v: Vec<isize> = json::decode("[3, 1]").unwrap();
    assert_eq!(v, [3, 1]);

    // Nested arrays decode into nested Vecs.
    let v: Vec<Vec<usize>> = json::decode("[[3], [1, 2]]").unwrap();
    assert_eq!(v, [vec![3], vec![1, 2]]);
}

#[test]
fn test_decode_tuple() {
    // JSON arrays decode into tuples positionally, with per-slot types.
    let t: (usize, usize, usize) = json::decode("[1, 2, 3]").unwrap();
    assert_eq!(t, (1, 2, 3));

    let t: (usize, string::String) = json::decode("[1, \"two\"]").unwrap();
    assert_eq!(t, (1, "two".to_string()));
}

#[test]
fn test_decode_tuple_malformed_types() {
    // Element type mismatch (number where a string is expected) must error.
    assert!(json::decode::<(usize, string::String)>("[1, 2]").is_err());
}

#[test]
fn test_decode_tuple_malformed_length() {
    // Arity mismatch (3 elements into a 2-tuple) must error.
    assert!(json::decode::<(usize, usize)>("[1, 2, 3]").is_err());
}
+
#[test]
fn test_read_object() {
    // Truncated objects: the parser reports exactly where the input ran out.
    assert_eq!(from_str("{"), Err(SyntaxError(EOFWhileParsingObject, 1, 2)));
    assert_eq!(from_str("{ "), Err(SyntaxError(EOFWhileParsingObject, 1, 3)));
    assert_eq!(from_str("{1"), Err(SyntaxError(KeyMustBeAString, 1, 2)));
    assert_eq!(from_str("{ \"a\""), Err(SyntaxError(EOFWhileParsingObject, 1, 6)));
    assert_eq!(from_str("{\"a\""), Err(SyntaxError(EOFWhileParsingObject, 1, 5)));
    assert_eq!(from_str("{\"a\" "), Err(SyntaxError(EOFWhileParsingObject, 1, 6)));

    // Structural errors after a key has been read.
    assert_eq!(from_str("{\"a\" 1"), Err(SyntaxError(ExpectedColon, 1, 6)));
    assert_eq!(from_str("{\"a\":"), Err(SyntaxError(EOFWhileParsingValue, 1, 6)));
    assert_eq!(from_str("{\"a\":1"), Err(SyntaxError(EOFWhileParsingObject, 1, 7)));
    assert_eq!(from_str("{\"a\":1 1"), Err(SyntaxError(InvalidSyntax, 1, 8)));
    assert_eq!(from_str("{\"a\":1,"), Err(SyntaxError(EOFWhileParsingObject, 1, 8)));

    // Well-formed objects, with and without surrounding whitespace;
    // `mk_object` (defined elsewhere in this file) builds the expected Json.
    assert_eq!(from_str("{}").unwrap(), mk_object(&[]));
    assert_eq!(from_str("{\"a\": 3}").unwrap(),
               mk_object(&[("a".to_string(), U64(3))]));

    assert_eq!(from_str(
                   "{ \"a\": null, \"b\" : true }").unwrap(),
               mk_object(&[
                   ("a".to_string(), Null),
                   ("b".to_string(), Boolean(true))]));
    assert_eq!(from_str("\n{ \"a\": null, \"b\" : true }\n").unwrap(),
               mk_object(&[
                   ("a".to_string(), Null),
                   ("b".to_string(), Boolean(true))]));
    assert_eq!(from_str(
                   "{\"a\" : 1.0 ,\"b\": [ true ]}").unwrap(),
               mk_object(&[
                   ("a".to_string(), F64(1.0)),
                   ("b".to_string(), Array(vec![Boolean(true)]))
               ]));
    // Deeply nested object kept on one logical line via `\` continuations
    // (a `\` at end of line also swallows the next line's leading whitespace;
    // the spaces *before* each `\` are part of the string).
    assert_eq!(from_str(
        "{\
            \"a\": 1.0, \
            \"b\": [\
                true,\
                \"foo\\nbar\", \
                { \"c\": {\"d\": null} } \
            ]\
        }").unwrap(),
               mk_object(&[
                   ("a".to_string(), F64(1.0)),
                   ("b".to_string(), Array(vec![
                       Boolean(true),
                       String("foo\nbar".to_string()),
                       mk_object(&[
                           ("c".to_string(), mk_object(&[("d".to_string(), Null)]))
                       ])
                   ]))
               ]));
}
+
#[test]
fn test_decode_struct() {
    // Nested struct decoding: `Outer`/`Inner` are derive(RustcDecodable)
    // types defined elsewhere in this file; `a: ()` absorbs the JSON null.
    let s = "{
        \"inner\": [
            { \"a\": null, \"b\": 2, \"c\": [\"abc\", \"xyz\"] }
        ]
    }";

    let v: Outer = json::decode(s).unwrap();
    assert_eq!(
        v,
        Outer {
            inner: vec![
                Inner { a: (), b: 2, c: vec!["abc".to_string(), "xyz".to_string()] }
            ]
        }
    );
}

// Struct used to check that JSON `null` in a float position decodes to NaN
// rather than failing.
#[derive(RustcDecodable)]
struct FloatStruct {
    f: f64,
    a: Vec<f64>
}
#[test]
fn test_decode_struct_with_nan() {
    let s = "{\"f\":null,\"a\":[null,123]}";
    let obj: FloatStruct = json::decode(s).unwrap();
    // NaN != NaN, so the null-decoded fields are checked with is_nan().
    assert!(obj.f.is_nan());
    assert!(obj.a[0].is_nan());
    assert_eq!(obj.a[1], 123f64);
}
+
#[test]
fn test_decode_option() {
    // JSON null -> None; any other value decodes into Some.
    let value: Option<string::String> = json::decode("null").unwrap();
    assert_eq!(value, None);

    let value: Option<string::String> = json::decode("\"jodhpurs\"").unwrap();
    assert_eq!(value, Some("jodhpurs".to_string()));
}

#[test]
fn test_decode_enum() {
    // Unit variants decode from a bare string; data-carrying variants use the
    // {"variant": ..., "fields": [...]} object encoding.
    let value: Animal = json::decode("\"Dog\"").unwrap();
    assert_eq!(value, Dog);

    let s = "{\"variant\":\"Frog\",\"fields\":[\"Henry\",349]}";
    let value: Animal = json::decode(s).unwrap();
    assert_eq!(value, Frog("Henry".to_string(), 349));
}

#[test]
fn test_decode_map() {
    // Map values may themselves be enums in either encoding form.
    let s = "{\"a\": \"Dog\", \"b\": {\"variant\":\"Frog\",\
              \"fields\":[\"Henry\", 349]}}";
    let mut map: BTreeMap<string::String, Animal> = json::decode(s).unwrap();

    assert_eq!(map.remove(&"a".to_string()), Some(Dog));
    assert_eq!(map.remove(&"b".to_string()), Some(Frog("Henry".to_string(), 349)));
}

#[test]
fn test_multiline_errors() {
    // Errors past the first line report the correct (line, column):
    // line 3 is `  "bar"` (7 chars), so EOF is at column 8.
    assert_eq!(from_str("{\n  \"foo\":\n  \"bar\""),
               Err(SyntaxError(EOFWhileParsingObject, 3, 8)));
}
+
// Fixture types for the decode-error tests below; never constructed directly.
#[derive(RustcDecodable)]
#[allow(dead_code)]
struct DecodeStruct {
    x: f64,
    y: bool,
    z: string::String,
    w: Vec<DecodeStruct>
}
#[derive(RustcDecodable)]
enum DecodeEnum {
    A(f64),
    B(string::String)
}
/// Parses `to_parse` (which must be syntactically valid JSON), decodes it
/// into `T`, and asserts that decoding fails with exactly `expected`.
/// Panics if the input fails to *parse* or if decoding succeeds.
fn check_err<T: Decodable>(to_parse: &'static str, expected: DecoderError) {
    let res: DecodeResult<T> = match from_str(to_parse) {
        Err(e) => Err(ParseError(e)),
        Ok(json) => Decodable::decode(&mut Decoder::new(json))
    };
    // Order matters: ParseError must be matched before the catch-all Err arm,
    // since a parse failure means the test input itself is wrong.
    match res {
        Ok(_) => panic!("`{:?}` parsed & decoded ok, expecting error `{:?}`",
                        to_parse, expected),
        Err(ParseError(e)) => panic!("`{:?}` is not valid json: {:?}",
                                     to_parse, e),
        Err(e) => {
            assert_eq!(e, expected);
        }
    }
}
#[test]
fn test_decode_errors_struct() {
    // One wrong-typed field per case; the error names the expected JSON kind
    // and echoes the offending fragment.
    check_err::<DecodeStruct>("[]", ExpectedError("Object".to_string(), "[]".to_string()));
    check_err::<DecodeStruct>("{\"x\": true, \"y\": true, \"z\": \"\", \"w\": []}",
                              ExpectedError("Number".to_string(), "true".to_string()));
    check_err::<DecodeStruct>("{\"x\": 1, \"y\": [], \"z\": \"\", \"w\": []}",
                              ExpectedError("Boolean".to_string(), "[]".to_string()));
    check_err::<DecodeStruct>("{\"x\": 1, \"y\": true, \"z\": {}, \"w\": []}",
                              ExpectedError("String".to_string(), "{}".to_string()));
    check_err::<DecodeStruct>("{\"x\": 1, \"y\": true, \"z\": \"\", \"w\": null}",
                              ExpectedError("Array".to_string(), "null".to_string()));
    // Omitting a field entirely is a distinct error.
    check_err::<DecodeStruct>("{\"x\": 1, \"y\": true, \"z\": \"\"}",
                              MissingFieldError("w".to_string()));
}
#[test]
fn test_decode_errors_enum() {
    // Malformed {"variant", "fields"} encodings, ending with an unknown
    // variant name.
    check_err::<DecodeEnum>("{}",
                            MissingFieldError("variant".to_string()));
    check_err::<DecodeEnum>("{\"variant\": 1}",
                            ExpectedError("String".to_string(), "1".to_string()));
    check_err::<DecodeEnum>("{\"variant\": \"A\"}",
                            MissingFieldError("fields".to_string()));
    check_err::<DecodeEnum>("{\"variant\": \"A\", \"fields\": null}",
                            ExpectedError("Array".to_string(), "null".to_string()));
    check_err::<DecodeEnum>("{\"variant\": \"C\", \"fields\": []}",
                            UnknownVariantError("C".to_string()));
}
+
#[test]
fn test_find() {
    // `find` looks up a top-level key in a JSON object.
    let json_value = from_str("{\"dog\" : \"cat\"}").unwrap();
    let found_str = json_value.find("dog");
    // assert_eq! instead of assert!(.. == ..): prints both sides on failure.
    assert_eq!(found_str.unwrap().as_string(), Some("cat"));
}

#[test]
fn test_find_path() {
    // `find_path` follows a chain of keys through nested objects.
    let json_value = from_str("{\"dog\":{\"cat\": {\"mouse\" : \"cheese\"}}}").unwrap();
    let found_str = json_value.find_path(&["dog", "cat", "mouse"]);
    assert_eq!(found_str.unwrap().as_string(), Some("cheese"));
}

#[test]
fn test_search() {
    // `search` finds the first occurrence of a key at any depth.
    let json_value = from_str("{\"dog\":{\"cat\": {\"mouse\" : \"cheese\"}}}").unwrap();
    let found_str = json_value.search("mouse").and_then(|j| j.as_string());
    assert_eq!(found_str, Some("cheese"));
}

#[test]
fn test_index() {
    // Index by key to get the nested array, then by position within it.
    let json_value = from_str("{\"animals\":[\"dog\",\"cat\",\"mouse\"]}").unwrap();
    let array = &json_value["animals"]; // `&` binding replaces old `let ref`
    assert_eq!(array[0].as_string().unwrap(), "dog");
    assert_eq!(array[1].as_string().unwrap(), "cat");
    assert_eq!(array[2].as_string().unwrap(), "mouse");
}
+
// The is_*/as_* accessor tests below use assert_eq! on the Option returned by
// each as_* accessor instead of the old `assert!(x.is_some() && x.unwrap() == y)`
// pattern, which gave no diagnostics on failure and unwrapped twice.

#[test]
fn test_is_object() {
    let json_value = from_str("{}").unwrap();
    assert!(json_value.is_object());
}

#[test]
fn test_as_object() {
    let json_value = from_str("{}").unwrap();
    let json_object = json_value.as_object();
    assert!(json_object.is_some());
}

#[test]
fn test_is_array() {
    let json_value = from_str("[1, 2, 3]").unwrap();
    assert!(json_value.is_array());
}

#[test]
fn test_as_array() {
    let json_value = from_str("[1, 2, 3]").unwrap();
    let json_array = json_value.as_array();
    // Compare through `map` so a failure prints the actual length (or None).
    assert_eq!(json_array.map(|a| a.len()), Some(3));
}

#[test]
fn test_is_string() {
    let json_value = from_str("\"dog\"").unwrap();
    assert!(json_value.is_string());
}

#[test]
fn test_as_string() {
    let json_value = from_str("\"dog\"").unwrap();
    let json_str = json_value.as_string();
    let expected_str = "dog";
    assert_eq!(json_str, Some(expected_str));
}

#[test]
fn test_is_number() {
    let json_value = from_str("12").unwrap();
    assert!(json_value.is_number());
}

#[test]
fn test_is_i64() {
    // Non-negative integers parse as U64, so only the negative literal is i64.
    let json_value = from_str("-12").unwrap();
    assert!(json_value.is_i64());

    let json_value = from_str("12").unwrap();
    assert!(!json_value.is_i64());

    let json_value = from_str("12.0").unwrap();
    assert!(!json_value.is_i64());
}

#[test]
fn test_is_u64() {
    let json_value = from_str("12").unwrap();
    assert!(json_value.is_u64());

    let json_value = from_str("-12").unwrap();
    assert!(!json_value.is_u64());

    let json_value = from_str("12.0").unwrap();
    assert!(!json_value.is_u64());
}

#[test]
fn test_is_f64() {
    // Only literals with a fractional part parse as F64.
    let json_value = from_str("12").unwrap();
    assert!(!json_value.is_f64());

    let json_value = from_str("-12").unwrap();
    assert!(!json_value.is_f64());

    let json_value = from_str("12.0").unwrap();
    assert!(json_value.is_f64());

    let json_value = from_str("-12.0").unwrap();
    assert!(json_value.is_f64());
}

#[test]
fn test_as_i64() {
    let json_value = from_str("-12").unwrap();
    let json_num = json_value.as_i64();
    assert_eq!(json_num, Some(-12));
}

#[test]
fn test_as_u64() {
    let json_value = from_str("12").unwrap();
    let json_num = json_value.as_u64();
    assert_eq!(json_num, Some(12));
}

#[test]
fn test_as_f64() {
    let json_value = from_str("12.0").unwrap();
    let json_num = json_value.as_f64();
    assert_eq!(json_num, Some(12f64));
}

#[test]
fn test_is_boolean() {
    let json_value = from_str("false").unwrap();
    assert!(json_value.is_boolean());
}

#[test]
fn test_as_boolean() {
    let json_value = from_str("false").unwrap();
    let json_bool = json_value.as_boolean();
    assert_eq!(json_bool, Some(false));
}

#[test]
fn test_is_null() {
    let json_value = from_str("null").unwrap();
    assert!(json_value.is_null());
}

#[test]
fn test_as_null() {
    let json_value = from_str("null").unwrap();
    let json_null = json_value.as_null();
    assert_eq!(json_null, Some(()));
}
+
#[test]
fn test_encode_hashmap_with_numeric_key() {
    // usize keys must be emitted as quoted JSON strings so the output
    // re-parses; we only check that the round-trip parse succeeds.
    // NOTE(review): despite the name this goes through `as_pretty_json`,
    // making it identical to test_prettyencode_hashmap_with_numeric_key
    // below — presumably one of the two was meant to use the compact
    // encoder (`as_json`); confirm.
    use std::str::from_utf8;
    use std::collections::HashMap;
    let mut hm: HashMap<usize, bool> = HashMap::new();
    hm.insert(1, true);
    let mut mem_buf = Vec::new();
    write!(&mut mem_buf, "{}", json::as_pretty_json(&hm)).unwrap();
    let json_str = from_utf8(&mem_buf[..]).unwrap();
    match from_str(json_str) {
        Err(_) => panic!("Unable to parse json_str: {:?}", json_str),
        _ => {} // it parsed and we are good to go
    }
}

#[test]
fn test_prettyencode_hashmap_with_numeric_key() {
    // Same as above but explicitly exercising the pretty printer.
    use std::str::from_utf8;
    use std::collections::HashMap;
    let mut hm: HashMap<usize, bool> = HashMap::new();
    hm.insert(1, true);
    let mut mem_buf = Vec::new();
    write!(&mut mem_buf, "{}", json::as_pretty_json(&hm)).unwrap();
    let json_str = from_utf8(&mem_buf[..]).unwrap();
    match from_str(json_str) {
        Err(_) => panic!("Unable to parse json_str: {:?}", json_str),
        _ => {} // it parsed and we are good to go
    }
}
+
#[test]
fn test_prettyencoder_indent_level_param() {
    // Verifies that `.indent(i)` scales every nesting level's leading
    // whitespace by exactly i spaces, for several values of i.
    use std::str::from_utf8;
    use std::collections::BTreeMap;

    let mut tree = BTreeMap::new();

    tree.insert("hello".to_string(), String("guten tag".to_string()));
    tree.insert("goodbye".to_string(), String("sayonara".to_string()));

    let json = Array(
        // The following layout below should look a lot like
        // the pretty-printed JSON (indent * x)
        vec!
        ( // 0x
            String("greetings".to_string()), // 1x
            Object(tree), // 1x + 2x + 2x + 1x
        ) // 0x
        // End JSON array (7 lines)
    );

    // Helper function for counting indents
    fn indents(source: &str) -> usize {
        let trimmed = source.trim_start_matches(' ');
        source.len() - trimmed.len()
    }

    // Test up to 4 spaces of indents (more?)
    for i in 0..4 {
        let mut writer = Vec::new();
        write!(&mut writer, "{}",
               json::as_pretty_json(&json).indent(i)).unwrap();

        let printed = from_utf8(&writer[..]).unwrap();

        // Check for indents at each line
        let lines: Vec<&str> = printed.lines().collect();
        assert_eq!(lines.len(), 7); // JSON should be 7 lines

        // Each line's indent must be its nesting depth times i.
        assert_eq!(indents(lines[0]), 0 * i); // [
        assert_eq!(indents(lines[1]), 1 * i); // "greetings",
        assert_eq!(indents(lines[2]), 1 * i); // {
        assert_eq!(indents(lines[3]), 2 * i); // "hello": "guten tag",
        assert_eq!(indents(lines[4]), 2 * i); // "goodbye": "sayonara"
        assert_eq!(indents(lines[5]), 1 * i); // },
        assert_eq!(indents(lines[6]), 0 * i); // ]

        // Finally, test that the pretty-printed JSON is valid
        from_str(printed).ok().expect("Pretty-printed JSON is invalid!");
    }
}
+
#[test]
fn test_hashmap_with_enum_key() {
    // Unit enum variants serialize as plain strings, so they are legal
    // object keys; the map must survive an encode/decode round trip.
    use std::collections::HashMap;
    #[derive(RustcEncodable, Eq, Hash, PartialEq, RustcDecodable, Debug)]
    enum Enum {
        Foo,
        #[allow(dead_code)]
        Bar,
    }
    let mut map = HashMap::new();
    map.insert(Enum::Foo, 0);
    let result = json::encode(&map).unwrap();
    assert_eq!(&result[..], r#"{"Foo":0}"#);
    let decoded: HashMap<Enum, _> = json::decode(&result).unwrap();
    assert_eq!(map, decoded);
}

#[test]
fn test_hashmap_with_numeric_key_can_handle_double_quote_delimited_key() {
    // JSON keys are always strings; a numeric Rust key must accept the
    // quoted form "1".
    use std::collections::HashMap;
    let json_str = "{\"1\":true}";
    let json_obj = match from_str(json_str) {
        Err(_) => panic!("Unable to parse json_str: {:?}", json_str),
        Ok(o) => o
    };
    let mut decoder = Decoder::new(json_obj);
    let _hm: HashMap<usize, bool> = Decodable::decode(&mut decoder).unwrap();
}

#[test]
fn test_hashmap_with_numeric_key_will_error_with_string_keys() {
    // A non-numeric key must fail to decode into HashMap<usize, _>,
    // reporting the offending key text.
    use std::collections::HashMap;
    let json_str = "{\"a\":true}";
    let json_obj = match from_str(json_str) {
        Err(_) => panic!("Unable to parse json_str: {:?}", json_str),
        Ok(o) => o
    };
    let mut decoder = Decoder::new(json_obj);
    let result: Result<HashMap<usize, bool>, DecoderError> = Decodable::decode(&mut decoder);
    assert_eq!(result, Err(ExpectedError("Number".to_string(), "a".to_string())));
}
+
+fn assert_stream_equal(src: &str,
+ expected: Vec<(JsonEvent, Vec<StackElement<'_>>)>) {
+ let mut parser = Parser::new(src.chars());
+ let mut i = 0;
+ loop {
+ let evt = match parser.next() {
+ Some(e) => e,
+ None => { break; }
+ };
+ let (ref expected_evt, ref expected_stack) = expected[i];
+ if !parser.stack().is_equal_to(expected_stack) {
+ panic!("Parser stack is not equal to {:?}", expected_stack);
+ }
+ assert_eq!(&evt, expected_evt);
+ i+=1;
+ }
+}
#[test]
fn test_streaming_parser() {
    // One object exercising string, array, and literal values; each expected
    // event is paired with the full parser stack (key/index path) at that
    // point in the stream.
    assert_stream_equal(
        r#"{ "foo":"bar", "array" : [0, 1, 2, 3, 4, 5], "idents":[null,true,false]}"#,
        vec![
            (ObjectStart, vec![]),
            (StringValue("bar".to_string()), vec![StackElement::Key("foo")]),
            (ArrayStart, vec![StackElement::Key("array")]),
            (U64Value(0), vec![StackElement::Key("array"), StackElement::Index(0)]),
            (U64Value(1), vec![StackElement::Key("array"), StackElement::Index(1)]),
            (U64Value(2), vec![StackElement::Key("array"), StackElement::Index(2)]),
            (U64Value(3), vec![StackElement::Key("array"), StackElement::Index(3)]),
            (U64Value(4), vec![StackElement::Key("array"), StackElement::Index(4)]),
            (U64Value(5), vec![StackElement::Key("array"), StackElement::Index(5)]),
            (ArrayEnd, vec![StackElement::Key("array")]),
            (ArrayStart, vec![StackElement::Key("idents")]),
            (NullValue, vec![StackElement::Key("idents"),
                             StackElement::Index(0)]),
            (BooleanValue(true), vec![StackElement::Key("idents"),
                                      StackElement::Index(1)]),
            (BooleanValue(false), vec![StackElement::Key("idents"),
                                       StackElement::Index(2)]),
            (ArrayEnd, vec![StackElement::Key("idents")]),
            (ObjectEnd, vec![]),
        ]
    );
}
+fn last_event(src: &str) -> JsonEvent {
+ let mut parser = Parser::new(src.chars());
+ let mut evt = NullValue;
+ loop {
+ evt = match parser.next() {
+ Some(e) => e,
+ None => return evt,
+ }
+ }
+}
+
#[test]
fn test_read_object_streaming() {
    // Error cases: the final event for malformed input is Error with the
    // same (line, column) positions the non-streaming parser reports.
    assert_eq!(last_event("{ "), Error(SyntaxError(EOFWhileParsingObject, 1, 3)));
    assert_eq!(last_event("{1"), Error(SyntaxError(KeyMustBeAString, 1, 2)));
    assert_eq!(last_event("{ \"a\""), Error(SyntaxError(EOFWhileParsingObject, 1, 6)));
    assert_eq!(last_event("{\"a\""), Error(SyntaxError(EOFWhileParsingObject, 1, 5)));
    assert_eq!(last_event("{\"a\" "), Error(SyntaxError(EOFWhileParsingObject, 1, 6)));

    assert_eq!(last_event("{\"a\" 1"), Error(SyntaxError(ExpectedColon, 1, 6)));
    assert_eq!(last_event("{\"a\":"), Error(SyntaxError(EOFWhileParsingValue, 1, 6)));
    assert_eq!(last_event("{\"a\":1"), Error(SyntaxError(EOFWhileParsingObject, 1, 7)));
    assert_eq!(last_event("{\"a\":1 1"), Error(SyntaxError(InvalidSyntax, 1, 8)));
    assert_eq!(last_event("{\"a\":1,"), Error(SyntaxError(EOFWhileParsingObject, 1, 8)));
    assert_eq!(last_event("{\"a\":1,}"), Error(SyntaxError(TrailingComma, 1, 8)));

    // Success cases: event streams of increasing nesting depth.
    assert_stream_equal(
        "{}",
        vec![(ObjectStart, vec![]), (ObjectEnd, vec![])]
    );
    assert_stream_equal(
        "{\"a\": 3}",
        vec![
            (ObjectStart, vec![]),
            (U64Value(3), vec![StackElement::Key("a")]),
            (ObjectEnd, vec![]),
        ]
    );
    assert_stream_equal(
        "{ \"a\": null, \"b\" : true }",
        vec![
            (ObjectStart, vec![]),
            (NullValue, vec![StackElement::Key("a")]),
            (BooleanValue(true), vec![StackElement::Key("b")]),
            (ObjectEnd, vec![]),
        ]
    );
    assert_stream_equal(
        "{\"a\" : 1.0 ,\"b\": [ true ]}",
        vec![
            (ObjectStart, vec![]),
            (F64Value(1.0), vec![StackElement::Key("a")]),
            (ArrayStart, vec![StackElement::Key("b")]),
            (BooleanValue(true),vec![StackElement::Key("b"), StackElement::Index(0)]),
            (ArrayEnd, vec![StackElement::Key("b")]),
            (ObjectEnd, vec![]),
        ]
    );
    // Deep nesting: the stack reflects the full key/index path into the
    // document at each event.
    assert_stream_equal(
        r#"{
            "a": 1.0,
            "b": [
                true,
                "foo\nbar",
                { "c": {"d": null} }
            ]
        }"#,
        vec![
            (ObjectStart, vec![]),
            (F64Value(1.0), vec![StackElement::Key("a")]),
            (ArrayStart, vec![StackElement::Key("b")]),
            (BooleanValue(true), vec![StackElement::Key("b"),
                                      StackElement::Index(0)]),
            (StringValue("foo\nbar".to_string()), vec![StackElement::Key("b"),
                                                       StackElement::Index(1)]),
            (ObjectStart, vec![StackElement::Key("b"),
                               StackElement::Index(2)]),
            (ObjectStart, vec![StackElement::Key("b"),
                               StackElement::Index(2),
                               StackElement::Key("c")]),
            (NullValue, vec![StackElement::Key("b"),
                             StackElement::Index(2),
                             StackElement::Key("c"),
                             StackElement::Key("d")]),
            (ObjectEnd, vec![StackElement::Key("b"),
                             StackElement::Index(2),
                             StackElement::Key("c")]),
            (ObjectEnd, vec![StackElement::Key("b"),
                             StackElement::Index(2)]),
            (ArrayEnd, vec![StackElement::Key("b")]),
            (ObjectEnd, vec![]),
        ]
    );
}
#[test]
fn test_read_array_streaming() {
    // Success cases: the Index stack element tracks the position within
    // each (possibly nested) array.
    assert_stream_equal(
        "[]",
        vec![
            (ArrayStart, vec![]),
            (ArrayEnd, vec![]),
        ]
    );
    assert_stream_equal(
        "[ ]",
        vec![
            (ArrayStart, vec![]),
            (ArrayEnd, vec![]),
        ]
    );
    assert_stream_equal(
        "[true]",
        vec![
            (ArrayStart, vec![]),
            (BooleanValue(true), vec![StackElement::Index(0)]),
            (ArrayEnd, vec![]),
        ]
    );
    assert_stream_equal(
        "[ false ]",
        vec![
            (ArrayStart, vec![]),
            (BooleanValue(false), vec![StackElement::Index(0)]),
            (ArrayEnd, vec![]),
        ]
    );
    assert_stream_equal(
        "[null]",
        vec![
            (ArrayStart, vec![]),
            (NullValue, vec![StackElement::Index(0)]),
            (ArrayEnd, vec![]),
        ]
    );
    assert_stream_equal(
        "[3, 1]",
        vec![
            (ArrayStart, vec![]),
            (U64Value(3), vec![StackElement::Index(0)]),
            (U64Value(1), vec![StackElement::Index(1)]),
            (ArrayEnd, vec![]),
        ]
    );
    assert_stream_equal(
        "\n[3, 2]\n",
        vec![
            (ArrayStart, vec![]),
            (U64Value(3), vec![StackElement::Index(0)]),
            (U64Value(2), vec![StackElement::Index(1)]),
            (ArrayEnd, vec![]),
        ]
    );
    assert_stream_equal(
        "[2, [4, 1]]",
        vec![
            (ArrayStart, vec![]),
            (U64Value(2), vec![StackElement::Index(0)]),
            (ArrayStart, vec![StackElement::Index(1)]),
            (U64Value(4), vec![StackElement::Index(1), StackElement::Index(0)]),
            (U64Value(1), vec![StackElement::Index(1), StackElement::Index(1)]),
            (ArrayEnd, vec![StackElement::Index(1)]),
            (ArrayEnd, vec![]),
        ]
    );

    assert_eq!(last_event("["), Error(SyntaxError(EOFWhileParsingValue, 1, 2)));

    // NOTE(review): these from_str assertions duplicate test_read_array's
    // non-streaming checks; presumably kept to pin both parsers to the same
    // error positions.
    assert_eq!(from_str("["), Err(SyntaxError(EOFWhileParsingValue, 1, 2)));
    assert_eq!(from_str("[1"), Err(SyntaxError(EOFWhileParsingArray, 1, 3)));
    assert_eq!(from_str("[1,"), Err(SyntaxError(EOFWhileParsingValue, 1, 4)));
    assert_eq!(from_str("[1,]"), Err(SyntaxError(InvalidSyntax, 1, 4)));
    assert_eq!(from_str("[6 7]"), Err(SyntaxError(InvalidSyntax, 1, 4)));

}
#[test]
fn test_trailing_characters_streaming() {
    // Any non-whitespace after a complete value is an error; the column
    // points at the first trailing character.
    assert_eq!(last_event("nulla"), Error(SyntaxError(TrailingCharacters, 1, 5)));
    assert_eq!(last_event("truea"), Error(SyntaxError(TrailingCharacters, 1, 5)));
    assert_eq!(last_event("falsea"), Error(SyntaxError(TrailingCharacters, 1, 6)));
    assert_eq!(last_event("1a"), Error(SyntaxError(TrailingCharacters, 1, 2)));
    assert_eq!(last_event("[]a"), Error(SyntaxError(TrailingCharacters, 1, 3)));
    assert_eq!(last_event("{}a"), Error(SyntaxError(TrailingCharacters, 1, 3)));
}
#[test]
fn test_read_identifiers_streaming() {
    // The three JSON keyword literals, then prefixes/corruptions of each.
    assert_eq!(Parser::new("null".chars()).next(), Some(NullValue));
    assert_eq!(Parser::new("true".chars()).next(), Some(BooleanValue(true)));
    assert_eq!(Parser::new("false".chars()).next(), Some(BooleanValue(false)));

    assert_eq!(last_event("n"), Error(SyntaxError(InvalidSyntax, 1, 2)));
    assert_eq!(last_event("nul"), Error(SyntaxError(InvalidSyntax, 1, 4)));
    assert_eq!(last_event("t"), Error(SyntaxError(InvalidSyntax, 1, 2)));
    assert_eq!(last_event("truz"), Error(SyntaxError(InvalidSyntax, 1, 4)));
    assert_eq!(last_event("f"), Error(SyntaxError(InvalidSyntax, 1, 2)));
    assert_eq!(last_event("faz"), Error(SyntaxError(InvalidSyntax, 1, 3)));
}
+
#[test]
fn test_to_json() {
    // Blanket coverage of the ToJson impls: Json itself, all integer and
    // float widths, unit, bool, strings, tuples, slices, Vec, maps, Option.
    use std::collections::{HashMap,BTreeMap};
    use json::ToJson;

    let array2 = Array(vec![U64(1), U64(2)]);
    let array3 = Array(vec![U64(1), U64(2), U64(3)]);
    let object = {
        let mut tree_map = BTreeMap::new();
        tree_map.insert("a".to_string(), U64(1));
        tree_map.insert("b".to_string(), U64(2));
        Object(tree_map)
    };

    // Json::to_json is the identity.
    assert_eq!(array2.to_json(), array2);
    assert_eq!(object.to_json(), object);
    // Signed integers map to I64, unsigned to U64, floats to F64.
    assert_eq!(3_isize.to_json(), I64(3));
    assert_eq!(4_i8.to_json(), I64(4));
    assert_eq!(5_i16.to_json(), I64(5));
    assert_eq!(6_i32.to_json(), I64(6));
    assert_eq!(7_i64.to_json(), I64(7));
    assert_eq!(8_usize.to_json(), U64(8));
    assert_eq!(9_u8.to_json(), U64(9));
    assert_eq!(10_u16.to_json(), U64(10));
    assert_eq!(11_u32.to_json(), U64(11));
    assert_eq!(12_u64.to_json(), U64(12));
    assert_eq!(13.0_f32.to_json(), F64(13.0_f64));
    assert_eq!(14.0_f64.to_json(), F64(14.0_f64));
    // Unit and non-finite floats (not representable in JSON) become Null.
    assert_eq!(().to_json(), Null);
    assert_eq!(f32::INFINITY.to_json(), Null);
    assert_eq!(f64::NAN.to_json(), Null);
    assert_eq!(true.to_json(), Boolean(true));
    assert_eq!(false.to_json(), Boolean(false));
    assert_eq!("abc".to_json(), String("abc".to_string()));
    assert_eq!("abc".to_string().to_json(), String("abc".to_string()));
    // Tuples, slices, and Vecs all map to Array.
    assert_eq!((1_usize, 2_usize).to_json(), array2);
    assert_eq!((1_usize, 2_usize, 3_usize).to_json(), array3);
    assert_eq!([1_usize, 2_usize].to_json(), array2);
    assert_eq!((&[1_usize, 2_usize, 3_usize]).to_json(), array3);
    assert_eq!((vec![1_usize, 2_usize]).to_json(), array2);
    assert_eq!(vec![1_usize, 2_usize, 3_usize].to_json(), array3);
    // Both map types (string keys) map to Object.
    let mut tree_map = BTreeMap::new();
    tree_map.insert("a".to_string(), 1 as usize);
    tree_map.insert("b".to_string(), 2);
    assert_eq!(tree_map.to_json(), object);
    let mut hash_map = HashMap::new();
    hash_map.insert("a".to_string(), 1 as usize);
    hash_map.insert("b".to_string(), 2);
    assert_eq!(hash_map.to_json(), object);
    // Option: Some is transparent, None becomes Null.
    assert_eq!(Some(15).to_json(), I64(15));
    assert_eq!(Some(15 as usize).to_json(), U64(15));
    assert_eq!(None::<isize>.to_json(), Null);
}
+
#[test]
fn test_encode_hashmap_with_arbitrary_key() {
    // JSON object keys must be strings; a map keyed by a non-string type
    // must fail to encode with BadHashmapKey rather than emit invalid JSON.
    use std::collections::HashMap;
    #[derive(PartialEq, Eq, Hash, RustcEncodable)]
    struct ArbitraryType(usize);
    let mut hm: HashMap<ArbitraryType, bool> = HashMap::new();
    hm.insert(ArbitraryType(1), true);
    let mut mem_buf = string::String::new();
    let mut encoder = Encoder::new(&mut mem_buf);
    let result = hm.encode(&mut encoder);
    match result.unwrap_err() {
        EncoderError::BadHashmapKey => (),
        _ => panic!("expected bad hash map key")
    }
}
--- /dev/null
+extern crate serialize as rustc_serialize;
+
+use rustc_serialize::{Encodable, Decodable};
+use rustc_serialize::opaque::{Encoder, Decoder};
+use std::fmt::Debug;
+
// One field of every primitive type the opaque encoder supports; used by
// test_struct below to exercise struct round-tripping in a single value.
#[derive(PartialEq, Clone, Debug, RustcEncodable, RustcDecodable)]
struct Struct {
    a: (),
    b: u8,
    c: u16,
    d: u32,
    e: u64,
    f: usize,

    g: i8,
    h: i16,
    i: i32,
    j: i64,
    k: isize,

    l: char,
    m: String,
    n: f32,
    o: f64,
    p: bool,
    q: Option<u32>,
}
+
+
+fn check_round_trip<T: Encodable + Decodable + PartialEq + Debug>(values: Vec<T>) {
+ let mut encoder = Encoder::new(Vec::new());
+
+ for value in &values {
+ Encodable::encode(&value, &mut encoder).unwrap();
+ }
+
+ let data = encoder.into_inner();
+ let mut decoder = Decoder::new(&data[..], 0);
+
+ for value in values {
+ let decoded = Decodable::decode(&mut decoder).unwrap();
+ assert_eq!(value, decoded);
+ }
+}
+
#[test]
fn test_unit() {
    check_round_trip(vec![(), (), (), ()]);
}

#[test]
fn test_u8() {
    // `..=` so u8::MAX itself is round-tripped; the previous half-open
    // range `MIN..MAX` never tested the maximum value.
    let mut vec = vec![];
    for i in ::std::u8::MIN..=::std::u8::MAX {
        vec.push(i);
    }
    check_round_trip(vec);
}

#[test]
fn test_u16() {
    // Inclusive so u16::MAX is covered (see test_u8).
    for i in ::std::u16::MIN..=::std::u16::MAX {
        check_round_trip(vec![1, 2, 3, i, i, i]);
    }
}

#[test]
fn test_u32() {
    check_round_trip(vec![1, 2, 3, ::std::u32::MIN, 0, 1, ::std::u32::MAX, 2, 1]);
}

#[test]
fn test_u64() {
    check_round_trip(vec![1, 2, 3, ::std::u64::MIN, 0, 1, ::std::u64::MAX, 2, 1]);
}

#[test]
fn test_usize() {
    check_round_trip(vec![1, 2, 3, ::std::usize::MIN, 0, 1, ::std::usize::MAX, 2, 1]);
}

#[test]
fn test_i8() {
    // Inclusive so i8::MAX is covered (see test_u8).
    let mut vec = vec![];
    for i in ::std::i8::MIN..=::std::i8::MAX {
        vec.push(i);
    }
    check_round_trip(vec);
}

#[test]
fn test_i16() {
    // Inclusive so i16::MAX is covered (see test_u8).
    for i in ::std::i16::MIN..=::std::i16::MAX {
        check_round_trip(vec![-1, 2, -3, i, i, i, 2]);
    }
}

#[test]
fn test_i32() {
    check_round_trip(vec![-1, 2, -3, ::std::i32::MIN, 0, 1, ::std::i32::MAX, 2, 1]);
}

#[test]
fn test_i64() {
    check_round_trip(vec![-1, 2, -3, ::std::i64::MIN, 0, 1, ::std::i64::MAX, 2, 1]);
}

#[test]
fn test_isize() {
    check_round_trip(vec![-1, 2, -3, ::std::isize::MIN, 0, 1, ::std::isize::MAX, 2, 1]);
}

#[test]
fn test_bool() {
    check_round_trip(vec![false, true, true, false, false]);
}
+
#[test]
fn test_f32() {
    // Divide by 3 so most values have an inexact binary representation,
    // making bit-exact round-tripping a meaningful check.
    let mut vec = vec![];
    for i in -100..100 {
        vec.push((i as f32) / 3.0);
    }
    check_round_trip(vec);
}

#[test]
fn test_f64() {
    let mut vec = vec![];
    for i in -100..100 {
        vec.push((i as f64) / 3.0);
    }
    check_round_trip(vec);
}

#[test]
fn test_char() {
    // ASCII plus multi-byte chars (2- and 3-byte UTF-8) to exercise the
    // full char encoding path.
    let vec = vec!['a', 'b', 'c', 'd', 'A', 'X', ' ', '#', 'Ö', 'Ä', 'µ', '€'];
    check_round_trip(vec);
}
+
#[test]
fn test_string() {
    // Strings of varying lengths mixing ASCII with multi-byte UTF-8, so
    // length prefixes and byte-level copying are both exercised.
    let vec = vec!["abcbuÖeiovÄnameÜavmpßvmea€µsbpnvapeapmaebn".to_string(),
                   "abcbuÖganeiovÄnameÜavmpßvmea€µsbpnvapeapmaebn".to_string(),
                   "abcbuÖganeiovÄnameÜavmpßvmea€µsbpapmaebn".to_string(),
                   "abcbuÖganeiovÄnameÜavmpßvmeabpnvapeapmaebn".to_string(),
                   "abcbuÖganeiÄnameÜavmpßvmea€µsbpnvapeapmaebn".to_string(),
                   "abcbuÖganeiovÄnameÜavmpßvmea€µsbpmaebn".to_string(),
                   "abcbuÖganeiovÄnameÜavmpßvmea€µnvapeapmaebn".to_string()];

    check_round_trip(vec);
}

#[test]
fn test_option() {
    // Some of every signed width, then None at every signed width.
    check_round_trip(vec![Some(-1i8)]);
    check_round_trip(vec![Some(-2i16)]);
    check_round_trip(vec![Some(-3i32)]);
    check_round_trip(vec![Some(-4i64)]);
    check_round_trip(vec![Some(-5isize)]);

    let none_i8: Option<i8> = None;
    check_round_trip(vec![none_i8]);

    let none_i16: Option<i16> = None;
    check_round_trip(vec![none_i16]);

    let none_i32: Option<i32> = None;
    check_round_trip(vec![none_i32]);

    let none_i64: Option<i64> = None;
    check_round_trip(vec![none_i64]);

    let none_isize: Option<isize> = None;
    check_round_trip(vec![none_isize]);
}
+
#[test]
fn test_struct() {
    // Two instances of the all-primitives Struct: one with positive values
    // and q: None, one with negative values and q: Some, so both Option
    // arms and sign handling are round-tripped.
    check_round_trip(vec![Struct {
        a: (),
        b: 10,
        c: 11,
        d: 12,
        e: 13,
        f: 14,

        g: 15,
        h: 16,
        i: 17,
        j: 18,
        k: 19,

        l: 'x',
        m: "abc".to_string(),
        n: 20.5,
        o: 21.5,
        p: false,
        q: None,
    }]);

    check_round_trip(vec![Struct {
        a: (),
        b: 101,
        c: 111,
        d: 121,
        e: 131,
        f: 141,

        g: -15,
        h: -16,
        i: -17,
        j: -18,
        k: -19,

        l: 'y',
        m: "def".to_string(),
        n: -20.5,
        o: -21.5,
        p: true,
        q: Some(1234567),
    }]);
}
+
// Covers the three enum variant shapes: unit, tuple, and struct-like.
#[derive(PartialEq, Clone, Debug, RustcEncodable, RustcDecodable)]
enum Enum {
    Variant1,
    Variant2(usize, f32),
    Variant3 {
        a: i32,
        b: char,
        c: bool,
    },
}

#[test]
fn test_enum() {
    // One value per variant shape, plus a second struct-like value with
    // different field contents.
    check_round_trip(vec![Enum::Variant1,
                          Enum::Variant2(1, 2.5),
                          Enum::Variant3 {
                              a: 3,
                              b: 'b',
                              c: false,
                          },
                          Enum::Variant3 {
                              a: -4,
                              b: 'f',
                              c: true,
                          }]);
}
+
#[test]
fn test_sequence() {
    // A 200-element Vec spanning large negative to large positive values.
    let mut vec = vec![];
    for i in -100i64..100i64 {
        vec.push(i * 100000);
    }

    check_round_trip(vec![vec]);
}

#[test]
fn test_hash_map() {
    // Map round-trip; HashMap iteration order differs between encode and
    // decode runs, which PartialEq equality absorbs.
    use std::collections::HashMap;
    let mut map = HashMap::new();
    for i in -100i64..100i64 {
        map.insert(i * 100000, i * 10000);
    }

    check_round_trip(vec![map]);
}

#[test]
fn test_tuples() {
    // Heterogeneous tuples of several arities and element types.
    check_round_trip(vec![('x', (), false, 0.5f32)]);
    check_round_trip(vec![(9i8, 10u16, 1.5f64)]);
    check_round_trip(vec![(-12i16, 11u8, 12usize)]);
    check_round_trip(vec![(1234567isize, 100000000000000u64, 99999999999999i64)]);
    check_round_trip(vec![(String::new(), "some string".to_string())]);
}
/// ```
#[macro_export]
#[stable(feature = "rust1", since = "1.0.0")]
-#[allow_internal_unstable]
+#[cfg_attr(stage0, allow_internal_unstable)]
+#[cfg_attr(not(stage0), allow_internal_unstable(__rust_unstable_column, libstd_sys_internals))]
macro_rules! panic {
() => ({
panic!("explicit panic")
/// ```
#[macro_export]
#[stable(feature = "rust1", since = "1.0.0")]
-#[allow_internal_unstable]
+#[cfg_attr(stage0, allow_internal_unstable)]
+#[cfg_attr(not(stage0), allow_internal_unstable(print_internals))]
macro_rules! print {
($($arg:tt)*) => ($crate::io::_print(format_args!($($arg)*)));
}
/// ```
#[macro_export]
#[stable(feature = "rust1", since = "1.0.0")]
-#[allow_internal_unstable]
+#[cfg_attr(stage0, allow_internal_unstable)]
+#[cfg_attr(not(stage0), allow_internal_unstable(print_internals, format_args_nl))]
macro_rules! println {
() => (print!("\n"));
($($arg:tt)*) => ({
/// ```
#[macro_export]
#[stable(feature = "eprint", since = "1.19.0")]
-#[allow_internal_unstable]
+#[cfg_attr(stage0, allow_internal_unstable)]
+#[cfg_attr(not(stage0), allow_internal_unstable(print_internals))]
macro_rules! eprint {
($($arg:tt)*) => ($crate::io::_eprint(format_args!($($arg)*)));
}
/// ```
#[macro_export]
#[stable(feature = "eprint", since = "1.19.0")]
-#[allow_internal_unstable]
+#[cfg_attr(stage0, allow_internal_unstable)]
+#[cfg_attr(not(stage0), allow_internal_unstable(print_internals, format_args_nl))]
macro_rules! eprintln {
() => (eprint!("\n"));
($($arg:tt)*) => ({
/// A macro to await on an async call.
#[macro_export]
#[unstable(feature = "await_macro", issue = "50547")]
-#[allow_internal_unstable]
+#[cfg_attr(stage0, allow_internal_unstable)]
+#[cfg_attr(not(stage0), allow_internal_unstable(gen_future, generators))]
#[allow_internal_unsafe]
macro_rules! await {
($e:expr) => { {
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn octets(&self) -> [u8; 4] {
- let bits = u32::from_be(self.inner.s_addr);
- [(bits >> 24) as u8, (bits >> 16) as u8, (bits >> 8) as u8, bits as u8]
+ self.inner.s_addr.to_ne_bytes()
}
/// Returns [`true`] for the special 'unspecified' address (0.0.0.0).
use self::select::StartResult::*;
use self::blocking::SignalToken;
+#[cfg(all(test, not(target_os = "emscripten")))]
+mod select_tests;
+
mod blocking;
mod oneshot;
mod select;
}
/// Helper method for skipping the preflight checks during testing
- fn wait2(&self, do_preflight_checks: bool) -> usize {
+ pub(super) fn wait2(&self, do_preflight_checks: bool) -> usize {
// Note that this is currently an inefficient implementation. We in
// theory have knowledge about all receivers in the set ahead of time,
// so this method shouldn't really have to iterate over all of them yet
f.debug_struct("Handle").finish()
}
}
-
-#[allow(unused_imports)]
-#[cfg(all(test, not(target_os = "emscripten")))]
-mod tests {
- use thread;
- use sync::mpsc::*;
-
- // Don't use the libstd version so we can pull in the right Select structure
- // (std::comm points at the wrong one)
- macro_rules! select {
- (
- $($name:pat = $rx:ident.$meth:ident() => $code:expr),+
- ) => ({
- let sel = Select::new();
- $( let mut $rx = sel.handle(&$rx); )+
- unsafe {
- $( $rx.add(); )+
- }
- let ret = sel.wait();
- $( if ret == $rx.id() { let $name = $rx.$meth(); $code } else )+
- { unreachable!() }
- })
- }
-
- #[test]
- fn smoke() {
- let (tx1, rx1) = channel::<i32>();
- let (tx2, rx2) = channel::<i32>();
- tx1.send(1).unwrap();
- select! {
- foo = rx1.recv() => { assert_eq!(foo.unwrap(), 1); },
- _bar = rx2.recv() => { panic!() }
- }
- tx2.send(2).unwrap();
- select! {
- _foo = rx1.recv() => { panic!() },
- bar = rx2.recv() => { assert_eq!(bar.unwrap(), 2) }
- }
- drop(tx1);
- select! {
- foo = rx1.recv() => { assert!(foo.is_err()); },
- _bar = rx2.recv() => { panic!() }
- }
- drop(tx2);
- select! {
- bar = rx2.recv() => { assert!(bar.is_err()); }
- }
- }
-
- #[test]
- fn smoke2() {
- let (_tx1, rx1) = channel::<i32>();
- let (_tx2, rx2) = channel::<i32>();
- let (_tx3, rx3) = channel::<i32>();
- let (_tx4, rx4) = channel::<i32>();
- let (tx5, rx5) = channel::<i32>();
- tx5.send(4).unwrap();
- select! {
- _foo = rx1.recv() => { panic!("1") },
- _foo = rx2.recv() => { panic!("2") },
- _foo = rx3.recv() => { panic!("3") },
- _foo = rx4.recv() => { panic!("4") },
- foo = rx5.recv() => { assert_eq!(foo.unwrap(), 4); }
- }
- }
-
- #[test]
- fn closed() {
- let (_tx1, rx1) = channel::<i32>();
- let (tx2, rx2) = channel::<i32>();
- drop(tx2);
-
- select! {
- _a1 = rx1.recv() => { panic!() },
- a2 = rx2.recv() => { assert!(a2.is_err()); }
- }
- }
-
- #[test]
- fn unblocks() {
- let (tx1, rx1) = channel::<i32>();
- let (_tx2, rx2) = channel::<i32>();
- let (tx3, rx3) = channel::<i32>();
-
- let _t = thread::spawn(move|| {
- for _ in 0..20 { thread::yield_now(); }
- tx1.send(1).unwrap();
- rx3.recv().unwrap();
- for _ in 0..20 { thread::yield_now(); }
- });
-
- select! {
- a = rx1.recv() => { assert_eq!(a.unwrap(), 1); },
- _b = rx2.recv() => { panic!() }
- }
- tx3.send(1).unwrap();
- select! {
- a = rx1.recv() => { assert!(a.is_err()) },
- _b = rx2.recv() => { panic!() }
- }
- }
-
- #[test]
- fn both_ready() {
- let (tx1, rx1) = channel::<i32>();
- let (tx2, rx2) = channel::<i32>();
- let (tx3, rx3) = channel::<()>();
-
- let _t = thread::spawn(move|| {
- for _ in 0..20 { thread::yield_now(); }
- tx1.send(1).unwrap();
- tx2.send(2).unwrap();
- rx3.recv().unwrap();
- });
-
- select! {
- a = rx1.recv() => { assert_eq!(a.unwrap(), 1); },
- a = rx2.recv() => { assert_eq!(a.unwrap(), 2); }
- }
- select! {
- a = rx1.recv() => { assert_eq!(a.unwrap(), 1); },
- a = rx2.recv() => { assert_eq!(a.unwrap(), 2); }
- }
- assert_eq!(rx1.try_recv(), Err(TryRecvError::Empty));
- assert_eq!(rx2.try_recv(), Err(TryRecvError::Empty));
- tx3.send(()).unwrap();
- }
-
- #[test]
- fn stress() {
- const AMT: i32 = 10000;
- let (tx1, rx1) = channel::<i32>();
- let (tx2, rx2) = channel::<i32>();
- let (tx3, rx3) = channel::<()>();
-
- let _t = thread::spawn(move|| {
- for i in 0..AMT {
- if i % 2 == 0 {
- tx1.send(i).unwrap();
- } else {
- tx2.send(i).unwrap();
- }
- rx3.recv().unwrap();
- }
- });
-
- for i in 0..AMT {
- select! {
- i1 = rx1.recv() => { assert!(i % 2 == 0 && i == i1.unwrap()); },
- i2 = rx2.recv() => { assert!(i % 2 == 1 && i == i2.unwrap()); }
- }
- tx3.send(()).unwrap();
- }
- }
-
- #[allow(unused_must_use)]
- #[test]
- fn cloning() {
- let (tx1, rx1) = channel::<i32>();
- let (_tx2, rx2) = channel::<i32>();
- let (tx3, rx3) = channel::<()>();
-
- let _t = thread::spawn(move|| {
- rx3.recv().unwrap();
- tx1.clone();
- assert_eq!(rx3.try_recv(), Err(TryRecvError::Empty));
- tx1.send(2).unwrap();
- rx3.recv().unwrap();
- });
-
- tx3.send(()).unwrap();
- select! {
- _i1 = rx1.recv() => {},
- _i2 = rx2.recv() => panic!()
- }
- tx3.send(()).unwrap();
- }
-
- #[allow(unused_must_use)]
- #[test]
- fn cloning2() {
- let (tx1, rx1) = channel::<i32>();
- let (_tx2, rx2) = channel::<i32>();
- let (tx3, rx3) = channel::<()>();
-
- let _t = thread::spawn(move|| {
- rx3.recv().unwrap();
- tx1.clone();
- assert_eq!(rx3.try_recv(), Err(TryRecvError::Empty));
- tx1.send(2).unwrap();
- rx3.recv().unwrap();
- });
-
- tx3.send(()).unwrap();
- select! {
- _i1 = rx1.recv() => {},
- _i2 = rx2.recv() => panic!()
- }
- tx3.send(()).unwrap();
- }
-
- #[test]
- fn cloning3() {
- let (tx1, rx1) = channel::<()>();
- let (tx2, rx2) = channel::<()>();
- let (tx3, rx3) = channel::<()>();
- let _t = thread::spawn(move|| {
- let s = Select::new();
- let mut h1 = s.handle(&rx1);
- let mut h2 = s.handle(&rx2);
- unsafe { h2.add(); }
- unsafe { h1.add(); }
- assert_eq!(s.wait(), h2.id);
- tx3.send(()).unwrap();
- });
-
- for _ in 0..1000 { thread::yield_now(); }
- drop(tx1.clone());
- tx2.send(()).unwrap();
- rx3.recv().unwrap();
- }
-
- #[test]
- fn preflight1() {
- let (tx, rx) = channel();
- tx.send(()).unwrap();
- select! {
- _n = rx.recv() => {}
- }
- }
-
- #[test]
- fn preflight2() {
- let (tx, rx) = channel();
- tx.send(()).unwrap();
- tx.send(()).unwrap();
- select! {
- _n = rx.recv() => {}
- }
- }
-
- #[test]
- fn preflight3() {
- let (tx, rx) = channel();
- drop(tx.clone());
- tx.send(()).unwrap();
- select! {
- _n = rx.recv() => {}
- }
- }
-
- #[test]
- fn preflight4() {
- let (tx, rx) = channel();
- tx.send(()).unwrap();
- let s = Select::new();
- let mut h = s.handle(&rx);
- unsafe { h.add(); }
- assert_eq!(s.wait2(false), h.id);
- }
-
- #[test]
- fn preflight5() {
- let (tx, rx) = channel();
- tx.send(()).unwrap();
- tx.send(()).unwrap();
- let s = Select::new();
- let mut h = s.handle(&rx);
- unsafe { h.add(); }
- assert_eq!(s.wait2(false), h.id);
- }
-
- #[test]
- fn preflight6() {
- let (tx, rx) = channel();
- drop(tx.clone());
- tx.send(()).unwrap();
- let s = Select::new();
- let mut h = s.handle(&rx);
- unsafe { h.add(); }
- assert_eq!(s.wait2(false), h.id);
- }
-
- #[test]
- fn preflight7() {
- let (tx, rx) = channel::<()>();
- drop(tx);
- let s = Select::new();
- let mut h = s.handle(&rx);
- unsafe { h.add(); }
- assert_eq!(s.wait2(false), h.id);
- }
-
- #[test]
- fn preflight8() {
- let (tx, rx) = channel();
- tx.send(()).unwrap();
- drop(tx);
- rx.recv().unwrap();
- let s = Select::new();
- let mut h = s.handle(&rx);
- unsafe { h.add(); }
- assert_eq!(s.wait2(false), h.id);
- }
-
- #[test]
- fn preflight9() {
- let (tx, rx) = channel();
- drop(tx.clone());
- tx.send(()).unwrap();
- drop(tx);
- rx.recv().unwrap();
- let s = Select::new();
- let mut h = s.handle(&rx);
- unsafe { h.add(); }
- assert_eq!(s.wait2(false), h.id);
- }
-
- #[test]
- fn oneshot_data_waiting() {
- let (tx1, rx1) = channel();
- let (tx2, rx2) = channel();
- let _t = thread::spawn(move|| {
- select! {
- _n = rx1.recv() => {}
- }
- tx2.send(()).unwrap();
- });
-
- for _ in 0..100 { thread::yield_now() }
- tx1.send(()).unwrap();
- rx2.recv().unwrap();
- }
-
- #[test]
- fn stream_data_waiting() {
- let (tx1, rx1) = channel();
- let (tx2, rx2) = channel();
- tx1.send(()).unwrap();
- tx1.send(()).unwrap();
- rx1.recv().unwrap();
- rx1.recv().unwrap();
- let _t = thread::spawn(move|| {
- select! {
- _n = rx1.recv() => {}
- }
- tx2.send(()).unwrap();
- });
-
- for _ in 0..100 { thread::yield_now() }
- tx1.send(()).unwrap();
- rx2.recv().unwrap();
- }
-
- #[test]
- fn shared_data_waiting() {
- let (tx1, rx1) = channel();
- let (tx2, rx2) = channel();
- drop(tx1.clone());
- tx1.send(()).unwrap();
- rx1.recv().unwrap();
- let _t = thread::spawn(move|| {
- select! {
- _n = rx1.recv() => {}
- }
- tx2.send(()).unwrap();
- });
-
- for _ in 0..100 { thread::yield_now() }
- tx1.send(()).unwrap();
- rx2.recv().unwrap();
- }
-
- #[test]
- fn sync1() {
- let (tx, rx) = sync_channel::<i32>(1);
- tx.send(1).unwrap();
- select! {
- n = rx.recv() => { assert_eq!(n.unwrap(), 1); }
- }
- }
-
- #[test]
- fn sync2() {
- let (tx, rx) = sync_channel::<i32>(0);
- let _t = thread::spawn(move|| {
- for _ in 0..100 { thread::yield_now() }
- tx.send(1).unwrap();
- });
- select! {
- n = rx.recv() => { assert_eq!(n.unwrap(), 1); }
- }
- }
-
- #[test]
- fn sync3() {
- let (tx1, rx1) = sync_channel::<i32>(0);
- let (tx2, rx2): (Sender<i32>, Receiver<i32>) = channel();
- let _t = thread::spawn(move|| { tx1.send(1).unwrap(); });
- let _t = thread::spawn(move|| { tx2.send(2).unwrap(); });
- select! {
- n = rx1.recv() => {
- let n = n.unwrap();
- assert_eq!(n, 1);
- assert_eq!(rx2.recv().unwrap(), 2);
- },
- n = rx2.recv() => {
- let n = n.unwrap();
- assert_eq!(n, 2);
- assert_eq!(rx1.recv().unwrap(), 1);
- }
- }
- }
-}
--- /dev/null
+#![allow(unused_imports)]
+
+//! This file exists to hack around https://github.com/rust-lang/rust/issues/47238
+
+use thread;
+use sync::mpsc::*;
+
+// Don't use the libstd version so we can pull in the right Select structure
+// (std::comm points at the wrong one)
+macro_rules! select {
+ (
+ $($name:pat = $rx:ident.$meth:ident() => $code:expr),+
+ ) => ({
+ let sel = Select::new();
+ $( let mut $rx = sel.handle(&$rx); )+
+ unsafe {
+ $( $rx.add(); )+
+ }
+ let ret = sel.wait();
+ $( if ret == $rx.id() { let $name = $rx.$meth(); $code } else )+
+ { unreachable!() }
+ })
+}
+
+#[test]
+fn smoke() {
+ let (tx1, rx1) = channel::<i32>();
+ let (tx2, rx2) = channel::<i32>();
+ tx1.send(1).unwrap();
+ select! {
+ foo = rx1.recv() => { assert_eq!(foo.unwrap(), 1); },
+ _bar = rx2.recv() => { panic!() }
+ }
+ tx2.send(2).unwrap();
+ select! {
+ _foo = rx1.recv() => { panic!() },
+ bar = rx2.recv() => { assert_eq!(bar.unwrap(), 2) }
+ }
+ drop(tx1);
+ select! {
+ foo = rx1.recv() => { assert!(foo.is_err()); },
+ _bar = rx2.recv() => { panic!() }
+ }
+ drop(tx2);
+ select! {
+ bar = rx2.recv() => { assert!(bar.is_err()); }
+ }
+}
+
+#[test]
+fn smoke2() {
+ let (_tx1, rx1) = channel::<i32>();
+ let (_tx2, rx2) = channel::<i32>();
+ let (_tx3, rx3) = channel::<i32>();
+ let (_tx4, rx4) = channel::<i32>();
+ let (tx5, rx5) = channel::<i32>();
+ tx5.send(4).unwrap();
+ select! {
+ _foo = rx1.recv() => { panic!("1") },
+ _foo = rx2.recv() => { panic!("2") },
+ _foo = rx3.recv() => { panic!("3") },
+ _foo = rx4.recv() => { panic!("4") },
+ foo = rx5.recv() => { assert_eq!(foo.unwrap(), 4); }
+ }
+}
+
+#[test]
+fn closed() {
+ let (_tx1, rx1) = channel::<i32>();
+ let (tx2, rx2) = channel::<i32>();
+ drop(tx2);
+
+ select! {
+ _a1 = rx1.recv() => { panic!() },
+ a2 = rx2.recv() => { assert!(a2.is_err()); }
+ }
+}
+
+#[test]
+fn unblocks() {
+ let (tx1, rx1) = channel::<i32>();
+ let (_tx2, rx2) = channel::<i32>();
+ let (tx3, rx3) = channel::<i32>();
+
+ let _t = thread::spawn(move|| {
+ for _ in 0..20 { thread::yield_now(); }
+ tx1.send(1).unwrap();
+ rx3.recv().unwrap();
+ for _ in 0..20 { thread::yield_now(); }
+ });
+
+ select! {
+ a = rx1.recv() => { assert_eq!(a.unwrap(), 1); },
+ _b = rx2.recv() => { panic!() }
+ }
+ tx3.send(1).unwrap();
+ select! {
+ a = rx1.recv() => { assert!(a.is_err()) },
+ _b = rx2.recv() => { panic!() }
+ }
+}
+
+#[test]
+fn both_ready() {
+ let (tx1, rx1) = channel::<i32>();
+ let (tx2, rx2) = channel::<i32>();
+ let (tx3, rx3) = channel::<()>();
+
+ let _t = thread::spawn(move|| {
+ for _ in 0..20 { thread::yield_now(); }
+ tx1.send(1).unwrap();
+ tx2.send(2).unwrap();
+ rx3.recv().unwrap();
+ });
+
+ select! {
+ a = rx1.recv() => { assert_eq!(a.unwrap(), 1); },
+ a = rx2.recv() => { assert_eq!(a.unwrap(), 2); }
+ }
+ select! {
+ a = rx1.recv() => { assert_eq!(a.unwrap(), 1); },
+ a = rx2.recv() => { assert_eq!(a.unwrap(), 2); }
+ }
+ assert_eq!(rx1.try_recv(), Err(TryRecvError::Empty));
+ assert_eq!(rx2.try_recv(), Err(TryRecvError::Empty));
+ tx3.send(()).unwrap();
+}
+
+#[test]
+fn stress() {
+ const AMT: i32 = 10000;
+ let (tx1, rx1) = channel::<i32>();
+ let (tx2, rx2) = channel::<i32>();
+ let (tx3, rx3) = channel::<()>();
+
+ let _t = thread::spawn(move|| {
+ for i in 0..AMT {
+ if i % 2 == 0 {
+ tx1.send(i).unwrap();
+ } else {
+ tx2.send(i).unwrap();
+ }
+ rx3.recv().unwrap();
+ }
+ });
+
+ for i in 0..AMT {
+ select! {
+ i1 = rx1.recv() => { assert!(i % 2 == 0 && i == i1.unwrap()); },
+ i2 = rx2.recv() => { assert!(i % 2 == 1 && i == i2.unwrap()); }
+ }
+ tx3.send(()).unwrap();
+ }
+}
+
+#[allow(unused_must_use)]
+#[test]
+fn cloning() {
+ let (tx1, rx1) = channel::<i32>();
+ let (_tx2, rx2) = channel::<i32>();
+ let (tx3, rx3) = channel::<()>();
+
+ let _t = thread::spawn(move|| {
+ rx3.recv().unwrap();
+ tx1.clone();
+ assert_eq!(rx3.try_recv(), Err(TryRecvError::Empty));
+ tx1.send(2).unwrap();
+ rx3.recv().unwrap();
+ });
+
+ tx3.send(()).unwrap();
+ select! {
+ _i1 = rx1.recv() => {},
+ _i2 = rx2.recv() => panic!()
+ }
+ tx3.send(()).unwrap();
+}
+
+#[allow(unused_must_use)]
+#[test]
+fn cloning2() {
+ let (tx1, rx1) = channel::<i32>();
+ let (_tx2, rx2) = channel::<i32>();
+ let (tx3, rx3) = channel::<()>();
+
+ let _t = thread::spawn(move|| {
+ rx3.recv().unwrap();
+ tx1.clone();
+ assert_eq!(rx3.try_recv(), Err(TryRecvError::Empty));
+ tx1.send(2).unwrap();
+ rx3.recv().unwrap();
+ });
+
+ tx3.send(()).unwrap();
+ select! {
+ _i1 = rx1.recv() => {},
+ _i2 = rx2.recv() => panic!()
+ }
+ tx3.send(()).unwrap();
+}
+
+#[test]
+fn cloning3() {
+ let (tx1, rx1) = channel::<()>();
+ let (tx2, rx2) = channel::<()>();
+ let (tx3, rx3) = channel::<()>();
+ let _t = thread::spawn(move|| {
+ let s = Select::new();
+ let mut h1 = s.handle(&rx1);
+ let mut h2 = s.handle(&rx2);
+ unsafe { h2.add(); }
+ unsafe { h1.add(); }
+ assert_eq!(s.wait(), h2.id());
+ tx3.send(()).unwrap();
+ });
+
+ for _ in 0..1000 { thread::yield_now(); }
+ drop(tx1.clone());
+ tx2.send(()).unwrap();
+ rx3.recv().unwrap();
+}
+
+#[test]
+fn preflight1() {
+ let (tx, rx) = channel();
+ tx.send(()).unwrap();
+ select! {
+ _n = rx.recv() => {}
+ }
+}
+
+#[test]
+fn preflight2() {
+ let (tx, rx) = channel();
+ tx.send(()).unwrap();
+ tx.send(()).unwrap();
+ select! {
+ _n = rx.recv() => {}
+ }
+}
+
+#[test]
+fn preflight3() {
+ let (tx, rx) = channel();
+ drop(tx.clone());
+ tx.send(()).unwrap();
+ select! {
+ _n = rx.recv() => {}
+ }
+}
+
+#[test]
+fn preflight4() {
+ let (tx, rx) = channel();
+ tx.send(()).unwrap();
+ let s = Select::new();
+ let mut h = s.handle(&rx);
+ unsafe { h.add(); }
+ assert_eq!(s.wait2(false), h.id());
+}
+
+#[test]
+fn preflight5() {
+ let (tx, rx) = channel();
+ tx.send(()).unwrap();
+ tx.send(()).unwrap();
+ let s = Select::new();
+ let mut h = s.handle(&rx);
+ unsafe { h.add(); }
+ assert_eq!(s.wait2(false), h.id());
+}
+
+#[test]
+fn preflight6() {
+ let (tx, rx) = channel();
+ drop(tx.clone());
+ tx.send(()).unwrap();
+ let s = Select::new();
+ let mut h = s.handle(&rx);
+ unsafe { h.add(); }
+ assert_eq!(s.wait2(false), h.id());
+}
+
+#[test]
+fn preflight7() {
+ let (tx, rx) = channel::<()>();
+ drop(tx);
+ let s = Select::new();
+ let mut h = s.handle(&rx);
+ unsafe { h.add(); }
+ assert_eq!(s.wait2(false), h.id());
+}
+
+#[test]
+fn preflight8() {
+ let (tx, rx) = channel();
+ tx.send(()).unwrap();
+ drop(tx);
+ rx.recv().unwrap();
+ let s = Select::new();
+ let mut h = s.handle(&rx);
+ unsafe { h.add(); }
+ assert_eq!(s.wait2(false), h.id());
+}
+
+#[test]
+fn preflight9() {
+ let (tx, rx) = channel();
+ drop(tx.clone());
+ tx.send(()).unwrap();
+ drop(tx);
+ rx.recv().unwrap();
+ let s = Select::new();
+ let mut h = s.handle(&rx);
+ unsafe { h.add(); }
+ assert_eq!(s.wait2(false), h.id());
+}
+
+#[test]
+fn oneshot_data_waiting() {
+ let (tx1, rx1) = channel();
+ let (tx2, rx2) = channel();
+ let _t = thread::spawn(move|| {
+ select! {
+ _n = rx1.recv() => {}
+ }
+ tx2.send(()).unwrap();
+ });
+
+ for _ in 0..100 { thread::yield_now() }
+ tx1.send(()).unwrap();
+ rx2.recv().unwrap();
+}
+
+#[test]
+fn stream_data_waiting() {
+ let (tx1, rx1) = channel();
+ let (tx2, rx2) = channel();
+ tx1.send(()).unwrap();
+ tx1.send(()).unwrap();
+ rx1.recv().unwrap();
+ rx1.recv().unwrap();
+ let _t = thread::spawn(move|| {
+ select! {
+ _n = rx1.recv() => {}
+ }
+ tx2.send(()).unwrap();
+ });
+
+ for _ in 0..100 { thread::yield_now() }
+ tx1.send(()).unwrap();
+ rx2.recv().unwrap();
+}
+
+#[test]
+fn shared_data_waiting() {
+ let (tx1, rx1) = channel();
+ let (tx2, rx2) = channel();
+ drop(tx1.clone());
+ tx1.send(()).unwrap();
+ rx1.recv().unwrap();
+ let _t = thread::spawn(move|| {
+ select! {
+ _n = rx1.recv() => {}
+ }
+ tx2.send(()).unwrap();
+ });
+
+ for _ in 0..100 { thread::yield_now() }
+ tx1.send(()).unwrap();
+ rx2.recv().unwrap();
+}
+
+#[test]
+fn sync1() {
+ let (tx, rx) = sync_channel::<i32>(1);
+ tx.send(1).unwrap();
+ select! {
+ n = rx.recv() => { assert_eq!(n.unwrap(), 1); }
+ }
+}
+
+#[test]
+fn sync2() {
+ let (tx, rx) = sync_channel::<i32>(0);
+ let _t = thread::spawn(move|| {
+ for _ in 0..100 { thread::yield_now() }
+ tx.send(1).unwrap();
+ });
+ select! {
+ n = rx.recv() => { assert_eq!(n.unwrap(), 1); }
+ }
+}
+
+#[test]
+fn sync3() {
+ let (tx1, rx1) = sync_channel::<i32>(0);
+ let (tx2, rx2): (Sender<i32>, Receiver<i32>) = channel();
+ let _t = thread::spawn(move|| { tx1.send(1).unwrap(); });
+ let _t = thread::spawn(move|| { tx2.send(2).unwrap(); });
+ select! {
+ n = rx1.recv() => {
+ let n = n.unwrap();
+ assert_eq!(n, 1);
+ assert_eq!(rx2.recv().unwrap(), 2);
+ },
+ n = rx2.recv() => {
+ let n = n.unwrap();
+ assert_eq!(n, 2);
+ assert_eq!(rx1.recv().unwrap(), 1);
+ }
+ }
+}
-//! Experimental extensions to `std` for Unix platforms.
+//! Platform-specific extensions to `std` for Unix platforms.
//!
//! Provides access to platform-level information on Unix platforms, and
//! exposes Unix-specific functions that would otherwise be inappropriate as
pub fn read(&self, data: &mut [u8]) -> io::Result<usize> {
let fd = FileDesc::new(libc::STDIN_FILENO);
let ret = fd.read(data);
- fd.into_raw();
+ fd.into_raw(); // do not close this FD
ret
}
}
pub fn write(&self, data: &[u8]) -> io::Result<usize> {
let fd = FileDesc::new(libc::STDOUT_FILENO);
let ret = fd.write(data);
- fd.into_raw();
+ fd.into_raw(); // do not close this FD
ret
}
pub fn write(&self, data: &[u8]) -> io::Result<usize> {
let fd = FileDesc::new(libc::STDERR_FILENO);
let ret = fd.write(data);
- fd.into_raw();
+ fd.into_raw(); // do not close this FD
ret
}
/// [`std::thread::LocalKey`]: ../std/thread/struct.LocalKey.html
#[macro_export]
#[stable(feature = "rust1", since = "1.0.0")]
-#[allow_internal_unstable]
+#[cfg_attr(stage0, allow_internal_unstable)]
+#[cfg_attr(not(stage0), allow_internal_unstable(thread_local_internals))]
macro_rules! thread_local {
// empty (base case for the recursion)
() => {};
reason = "should not be necessary",
issue = "0")]
#[macro_export]
-#[allow_internal_unstable]
+#[cfg_attr(stage0, allow_internal_unstable)]
+#[cfg_attr(not(stage0), allow_internal_unstable(
+ thread_local_internals, cfg_target_thread_local, thread_local,
+))]
#[allow_internal_unsafe]
macro_rules! __thread_local_inner {
(@key $(#[$attr:meta])* $vis:vis $name:ident, $t:ty, $init:expr) => {
/// A function-like procedural macro. TokenStream -> TokenStream.
ProcMacro {
expander: Box<dyn ProcMacro + sync::Sync + sync::Send>,
- allow_internal_unstable: bool,
+ /// Whitelist of unstable features that are treated as stable inside this macro
+ allow_internal_unstable: Option<Lrc<[Symbol]>>,
edition: Edition,
},
expander: Box<dyn TTMacroExpander + sync::Sync + sync::Send>,
def_info: Option<(ast::NodeId, Span)>,
/// Whether the contents of the macro can
- /// directly use `#[unstable]` things (true == yes).
- allow_internal_unstable: bool,
+ /// directly use `#[unstable]` things.
+ ///
+ /// Only allows things that require a feature gate in the given whitelist
+ allow_internal_unstable: Option<Lrc<[Symbol]>>,
/// Whether the contents of the macro can use `unsafe`
/// without triggering the `unsafe_code` lint.
allow_internal_unsafe: bool,
/// A function-like syntax extension that has an extra ident before
/// the block.
- ///
- IdentTT(Box<dyn IdentMacroExpander + sync::Sync + sync::Send>, Option<Span>, bool),
+ IdentTT {
+ expander: Box<dyn IdentMacroExpander + sync::Sync + sync::Send>,
+ span: Option<Span>,
+ allow_internal_unstable: Option<Lrc<[Symbol]>>,
+ },
/// An attribute-like procedural macro. TokenStream -> TokenStream.
/// The input is the annotated item.
match *self {
SyntaxExtension::DeclMacro { .. } |
SyntaxExtension::NormalTT { .. } |
- SyntaxExtension::IdentTT(..) |
+ SyntaxExtension::IdentTT { .. } |
SyntaxExtension::ProcMacro { .. } =>
MacroKind::Bang,
SyntaxExtension::NonMacroAttr { .. } |
SyntaxExtension::ProcMacroDerive(.., edition) => edition,
// Unstable legacy stuff
SyntaxExtension::NonMacroAttr { .. } |
- SyntaxExtension::IdentTT(..) |
+ SyntaxExtension::IdentTT { .. } |
SyntaxExtension::MultiDecorator(..) |
SyntaxExtension::MultiModifier(..) |
SyntaxExtension::BuiltinDerive(..) => hygiene::default_edition(),
call_site: span,
def_site: None,
format: ExpnFormat::MacroAttribute(Symbol::intern(&pretty_name)),
- allow_internal_unstable: true,
+ allow_internal_unstable: Some(vec![
+ Symbol::intern("rustc_attrs"),
+ Symbol::intern("structural_match"),
+ ].into()),
allow_internal_unsafe: false,
local_inner_macros: false,
edition: hygiene::default_edition(),
call_site: attr.span,
def_site: None,
format: MacroAttribute(Symbol::intern(&attr.path.to_string())),
- allow_internal_unstable: false,
+ allow_internal_unstable: None,
allow_internal_unsafe: false,
local_inner_macros: false,
edition: ext.edition(),
// don't stability-check macros in the same crate
// (the only time this is null is for syntax extensions registered as macros)
if def_site_span.map_or(false, |def_span| !crate_span.contains(def_span))
- && !span.allows_unstable() && this.cx.ecfg.features.map_or(true, |feats| {
+ && !span.allows_unstable(&feature.as_str())
+ && this.cx.ecfg.features.map_or(true, |feats| {
// macro features will count as lib features
!feats.declared_lib_features.iter().any(|&(feat, _)| feat == feature)
}) {
let opt_expanded = match *ext {
DeclMacro { ref expander, def_info, edition, .. } => {
if let Err(dummy_span) = validate_and_set_expn_info(self, def_info.map(|(_, s)| s),
- false, false, false, None,
+ None, false, false, None,
edition) {
dummy_span
} else {
NormalTT {
ref expander,
def_info,
- allow_internal_unstable,
+ ref allow_internal_unstable,
allow_internal_unsafe,
local_inner_macros,
unstable_feature,
edition,
} => {
if let Err(dummy_span) = validate_and_set_expn_info(self, def_info.map(|(_, s)| s),
- allow_internal_unstable,
+ allow_internal_unstable.clone(),
allow_internal_unsafe,
local_inner_macros,
unstable_feature,
}
}
- IdentTT(ref expander, tt_span, allow_internal_unstable) => {
+ IdentTT { ref expander, span: tt_span, ref allow_internal_unstable } => {
if ident.name == keywords::Invalid.name() {
self.cx.span_err(path.span,
&format!("macro {}! expects an ident argument", path));
call_site: span,
def_site: tt_span,
format: macro_bang_format(path),
- allow_internal_unstable,
+ allow_internal_unstable: allow_internal_unstable.clone(),
allow_internal_unsafe: false,
local_inner_macros: false,
edition: hygiene::default_edition(),
kind.dummy(span)
}
- SyntaxExtension::ProcMacro { ref expander, allow_internal_unstable, edition } => {
+ SyntaxExtension::ProcMacro { ref expander, ref allow_internal_unstable, edition } => {
if ident.name != keywords::Invalid.name() {
let msg =
format!("macro {}! expects no ident argument, given '{}'", path, ident);
def_site: None,
format: macro_bang_format(path),
// FIXME probably want to follow macro_rules macros here.
- allow_internal_unstable,
+ allow_internal_unstable: allow_internal_unstable.clone(),
allow_internal_unsafe: false,
local_inner_macros: false,
edition,
call_site: span,
def_site: None,
format: MacroAttribute(pretty_name),
- allow_internal_unstable: false,
+ allow_internal_unstable: None,
allow_internal_unsafe: false,
local_inner_macros: false,
edition: ext.edition(),
Some(invoc.fragment_kind.expect_from_annotatables(items))
}
BuiltinDerive(func) => {
- expn_info.allow_internal_unstable = true;
+ expn_info.allow_internal_unstable = Some(vec![
+ Symbol::intern("rustc_attrs"),
+ Symbol::intern("derive_clone_copy"),
+ Symbol::intern("derive_eq"),
+ Symbol::intern("libstd_sys_internals"), // RustcDeserialize and RustcSerialize
+ ].into());
invoc.expansion_data.mark.set_expn_info(expn_info);
let span = span.with_ctxt(self.cx.backtrace());
let mut items = Vec::new();
/* __rust_unstable_column!(): expands to the current column number */
pub fn expand_column_gated(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree])
-> Box<dyn base::MacResult+'static> {
- if sp.allows_unstable() {
+ if sp.allows_unstable("__rust_unstable_column") {
expand_column(cx, sp, tts)
} else {
cx.span_fatal(sp, "the __rust_unstable_column macro is unstable");
});
if body.legacy {
- let allow_internal_unstable = attr::contains_name(&def.attrs, "allow_internal_unstable");
+ let allow_internal_unstable = attr::find_by_name(&def.attrs, "allow_internal_unstable")
+ .map(|attr| attr
+ .meta_item_list()
+ .map(|list| list.iter()
+ .map(|it| it.name().unwrap_or_else(|| sess.span_diagnostic.span_bug(
+ it.span, "allow internal unstable expects feature names",
+ )))
+ .collect::<Vec<Symbol>>().into()
+ )
+ .unwrap_or_else(|| {
+ sess.span_diagnostic.span_warn(
+ attr.span, "allow_internal_unstable expects list of feature names. In the \
+ future this will become a hard error. Please use `allow_internal_unstable(\
+ foo, bar)` to only allow the `foo` and `bar` features",
+ );
+ vec![Symbol::intern("allow_internal_unstable_backcompat_hack")].into()
+ })
+ );
let allow_internal_unsafe = attr::contains_name(&def.attrs, "allow_internal_unsafe");
let mut local_inner_macros = false;
if let Some(macro_export) = attr::find_by_name(&def.attrs, "macro_export") {
stable",
cfg_fn!(profiler_runtime))),
- ("allow_internal_unstable", Normal, template!(Word), Gated(Stability::Unstable,
+ ("allow_internal_unstable", Normal, template!(Word, List: "feat1, feat2, ..."),
+ Gated(Stability::Unstable,
"allow_internal_unstable",
EXPLAIN_ALLOW_INTERNAL_UNSTABLE,
cfg_fn!(allow_internal_unstable))),
("proc_macro", Normal, template!(Word), Ungated),
("rustc_proc_macro_decls", Normal, template!(Word), Gated(Stability::Unstable,
- "rustc_proc_macro_decls",
+ "rustc_attrs",
"used internally by rustc",
cfg_fn!(rustc_attrs))),
pub fn check_and_emit(&self, sess: &ParseSess, features: &Features) {
let (cfg, feature, has_feature) = GATED_CFGS[self.index];
- if !has_feature(features) && !self.span.allows_unstable() {
+ if !has_feature(features) && !self.span.allows_unstable(feature) {
let explain = format!("`cfg({})` is experimental and subject to change", cfg);
emit_feature_err(sess, feature, self.span, GateIssue::Language, &explain);
}
name, explain, level) = ($cx, $has_feature, $span, $name, $explain, $level);
let has_feature: bool = has_feature(&$cx.features);
debug!("gate_feature(feature = {:?}, span = {:?}); has? {}", name, span, has_feature);
- if !has_feature && !span.allows_unstable() {
+ if !has_feature && !span.allows_unstable($name) {
leveled_feature_err(cx.parse_sess, name, span, GateIssue::Language, explain, level)
.emit();
}
for &(n, ty, _template, ref gateage) in BUILTIN_ATTRIBUTES {
if name == n {
if let Gated(_, name, desc, ref has_feature) = *gateage {
- gate_feature_fn!(self, has_feature, attr.span, name, desc, GateStrength::Hard);
+ if !attr.span.allows_unstable(name) {
+ gate_feature_fn!(
+ self, has_feature, attr.span, name, desc, GateStrength::Hard
+ );
+ }
} else if name == "doc" {
if let Some(content) = attr.meta_item_list() {
if content.iter().any(|c| c.check_name("include")) {
macro_rules! gate_feature_post {
($cx: expr, $feature: ident, $span: expr, $explain: expr) => {{
let (cx, span) = ($cx, $span);
- if !span.allows_unstable() {
+ if !span.allows_unstable(stringify!($feature)) {
gate_feature!(cx.context, $feature, span, $explain)
}
}};
($cx: expr, $feature: ident, $span: expr, $explain: expr, $level: expr) => {{
let (cx, span) = ($cx, $span);
- if !span.allows_unstable() {
+ if !span.allows_unstable(stringify!($feature)) {
gate_feature!(cx.context, $feature, span, $explain, $level)
}
}}
impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
fn visit_attribute(&mut self, attr: &ast::Attribute) {
- if !attr.span.allows_unstable() {
- // check for gated attributes
- self.context.check_attribute(attr, false);
- }
+ // check for gated attributes
+ self.context.check_attribute(attr, false);
if attr.check_name("doc") {
if let Some(content) = attr.meta_item_list() {
#![feature(crate_visibility_modifier)]
#![feature(label_break_value)]
+#![feature(nll)]
#![feature(rustc_attrs)]
#![feature(rustc_diagnostic_macros)]
#![feature(slice_sort_by_cached_key)]
call_site: DUMMY_SP,
def_site: None,
format: MacroAttribute(Symbol::intern("std_inject")),
- allow_internal_unstable: true,
+ allow_internal_unstable: Some(vec![
+ Symbol::intern("prelude_import"),
+ ].into()),
allow_internal_unsafe: false,
local_inner_macros: false,
edition: hygiene::default_edition(),
call_site: DUMMY_SP,
def_site: None,
format: MacroAttribute(Symbol::intern("test_case")),
- allow_internal_unstable: true,
+ allow_internal_unstable: Some(vec![
+ Symbol::intern("main"),
+ Symbol::intern("test"),
+ Symbol::intern("rustc_attrs"),
+ ].into()),
allow_internal_unsafe: false,
local_inner_macros: false,
edition: hygiene::default_edition(),
intrinsic: &str,
args: Vec<P<ast::Expr>>)
-> P<ast::Expr> {
- if cx.current_expansion.mark.expn_info().unwrap().allow_internal_unstable {
+ let intrinsic_allowed_via_allow_internal_unstable = cx
+ .current_expansion.mark.expn_info().unwrap()
+ .allow_internal_unstable.map_or(false, |features| features.iter().any(|&s|
+ s == "core_intrinsics"
+ ));
+ if intrinsic_allowed_via_allow_internal_unstable {
span = span.with_ctxt(cx.backtrace());
} else { // Avoid instability errors with user defined curstom derives, cc #36316
let mut info = cx.current_expansion.mark.expn_info().unwrap();
- info.allow_internal_unstable = true;
+ info.allow_internal_unstable = Some(vec![Symbol::intern("core_intrinsics")].into());
let mark = Mark::fresh(Mark::root());
mark.set_expn_info(info);
span = span.with_ctxt(SyntaxContext::empty().apply_mark(mark));
//if !ecx.ecfg.enable_allow_internal_unstable() {
// For some reason, the only one that actually works for `println` is the first check
- if !sp.allows_unstable() // the enclosing span is marked as `#[allow_insternal_unsable]`
+ if !sp.allows_unstable("format_args_nl") // the span is marked as `#[allow_internal_unstable]`
&& !ecx.ecfg.enable_allow_internal_unstable() // NOTE: when is this enabled?
&& !ecx.ecfg.enable_format_args_nl() // enabled using `#[feature(format_args_nl]`
{
#![feature(proc_macro_internals)]
#![feature(proc_macro_span)]
#![feature(decl_macro)]
+#![feature(nll)]
#![feature(str_escape)]
#![feature(rustc_diagnostic_macros)]
NormalTT {
expander: Box::new($f as MacroExpanderFn),
def_info: None,
- allow_internal_unstable: false,
+ allow_internal_unstable: None,
allow_internal_unsafe: false,
local_inner_macros: false,
unstable_feature: None,
NormalTT {
expander: Box::new(format::expand_format_args),
def_info: None,
- allow_internal_unstable: true,
+ allow_internal_unstable: Some(vec![
+ Symbol::intern("fmt_internals"),
+ ].into()),
allow_internal_unsafe: false,
local_inner_macros: false,
unstable_feature: None,
NormalTT {
expander: Box::new(format::expand_format_args_nl),
def_info: None,
- allow_internal_unstable: true,
+ allow_internal_unstable: Some(vec![
+ Symbol::intern("fmt_internals"),
+ ].into()),
allow_internal_unsafe: false,
local_inner_macros: false,
unstable_feature: None,
call_site: DUMMY_SP,
def_site: None,
format: MacroAttribute(Symbol::intern("proc_macro")),
- allow_internal_unstable: true,
+ allow_internal_unstable: Some(vec![
+ Symbol::intern("rustc_attrs"),
+ Symbol::intern("proc_macro_internals"),
+ ].into()),
allow_internal_unsafe: false,
local_inner_macros: false,
edition: hygiene::default_edition(),
call_site: DUMMY_SP,
def_site: None,
format: MacroAttribute(Symbol::intern("test")),
- allow_internal_unstable: true,
+ allow_internal_unstable: Some(vec![
+ Symbol::intern("rustc_attrs"),
+ Symbol::intern("test"),
+ ].into()),
allow_internal_unsafe: false,
local_inner_macros: false,
edition: hygiene::default_edition(),
call_site: DUMMY_SP,
def_site: None,
format: MacroAttribute(Symbol::intern("test_case")),
- allow_internal_unstable: true,
+ allow_internal_unstable: Some(vec![
+ Symbol::intern("test"),
+ Symbol::intern("rustc_attrs"),
+ ].into()),
allow_internal_unsafe: false,
local_inner_macros: false,
edition: hygiene::default_edition(),
use serialize::{Encodable, Decodable, Encoder, Decoder};
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use rustc_data_structures::sync::Lrc;
use std::{fmt, mem};
/// A SyntaxContext represents a chain of macro expansions (represented by marks).
pub def_site: Option<Span>,
/// The format with which the macro was invoked.
pub format: ExpnFormat,
- /// Whether the macro is allowed to use #[unstable]/feature-gated
- /// features internally without forcing the whole crate to opt-in
+ /// List of #[unstable]/feature-gated features that the macro is allowed to use
+ /// internally without forcing the whole crate to opt-in
/// to them.
- pub allow_internal_unstable: bool,
+ pub allow_internal_unstable: Option<Lrc<[Symbol]>>,
/// Whether the macro is allowed to use `unsafe` internally
/// even if the user crate has `#![forbid(unsafe_code)]`.
pub allow_internal_unsafe: bool,
#![feature(const_fn)]
#![feature(crate_visibility_modifier)]
#![feature(custom_attribute)]
+#![feature(nll)]
#![feature(non_exhaustive)]
#![feature(optin_builtin_traits)]
#![feature(rustc_attrs)]
/// Checks if a span is "internal" to a macro in which `#[unstable]`
/// items can be used (that is, a macro marked with
/// `#[allow_internal_unstable]`).
- pub fn allows_unstable(&self) -> bool {
+ pub fn allows_unstable(&self, feature: &str) -> bool {
match self.ctxt().outer().expn_info() {
- Some(info) => info.allow_internal_unstable,
+ Some(info) => info
+ .allow_internal_unstable
+ .map_or(false, |features| features.iter().any(|&f|
+ f == feature || f == "allow_internal_unstable_backcompat_hack"
+ )),
None => false,
}
}
authors = ["The Rust Project Developers"]
name = "term"
version = "0.0.0"
+edition = "2018"
[lib]
name = "term"
test(attr(deny(warnings))))]
#![deny(missing_docs)]
+#![deny(rust_2018_idioms)]
+
#![cfg_attr(windows, feature(libc))]
// Handle rustfmt skips
#![feature(custom_attribute)]
-#![feature(nll)]
#![allow(unused_attributes)]
use std::io::prelude::*;
+use std::io::{self, Stdout, Stderr};
pub use terminfo::TerminfoTerminal;
#[cfg(windows)]
pub use win::WinConsole;
-use std::io::{self, Stdout, Stderr};
-
pub mod terminfo;
#[cfg(windows)]
use std::error;
use std::fmt;
use std::fs::File;
-use std::io::prelude::*;
-use std::io;
-use std::io::BufReader;
+use std::io::{self, prelude::*, BufReader};
use std::path::Path;
-use Attr;
-use color;
-use Terminal;
-use self::searcher::get_dbpath_for_term;
-use self::parser::compiled::{parse, msys_terminfo};
-use self::parm::{expand, Variables, Param};
+use crate::Attr;
+use crate::color;
+use crate::Terminal;
+use searcher::get_dbpath_for_term;
+use parser::compiled::{parse, msys_terminfo};
+use parm::{expand, Variables, Param};
/// A parsed terminfo database entry.
#[derive(Debug)]
}
fn cause(&self) -> Option<&dyn error::Error> {
- use self::Error::*;
+ use Error::*;
match *self {
IoError(ref e) => Some(e),
_ => None,
}
impl fmt::Display for Error {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- use self::Error::*;
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ use Error::*;
match *self {
TermUnset => Ok(()),
MalformedTerminfo(ref e) => e.fmt(f),
/// Container for static and dynamic variable arrays
pub struct Variables {
/// Static variables A-Z
- sta: [Param; 26],
+ sta_va: [Param; 26],
/// Dynamic variables a-z
- dyn: [Param; 26],
+ dyn_va: [Param; 26],
}
impl Variables {
/// Returns a new zero-initialized Variables
pub fn new() -> Variables {
Variables {
- sta: [Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0),
- Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0),
- Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0),
- Number(0), Number(0), Number(0), Number(0), Number(0)],
- dyn: [Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0),
- Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0),
- Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0),
- Number(0), Number(0), Number(0), Number(0), Number(0)],
+ sta_va: [
+ Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0),
+ Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0),
+ Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0),
+ Number(0), Number(0), Number(0), Number(0), Number(0)
+ ],
+ dyn_va: [
+ Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0),
+ Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0),
+ Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0),
+ Number(0), Number(0), Number(0), Number(0), Number(0)
+ ],
}
}
}
if cur >= 'A' && cur <= 'Z' {
if let Some(arg) = stack.pop() {
let idx = (cur as u8) - b'A';
- vars.sta[idx as usize] = arg;
+ vars.sta_va[idx as usize] = arg;
} else {
return Err("stack is empty".to_string());
}
} else if cur >= 'a' && cur <= 'z' {
if let Some(arg) = stack.pop() {
let idx = (cur as u8) - b'a';
- vars.dyn[idx as usize] = arg;
+ vars.dyn_va[idx as usize] = arg;
} else {
return Err("stack is empty".to_string());
}
GetVar => {
if cur >= 'A' && cur <= 'Z' {
let idx = (cur as u8) - b'A';
- stack.push(vars.sta[idx as usize].clone());
+ stack.push(vars.sta_va[idx as usize].clone());
} else if cur >= 'a' && cur <= 'z' {
let idx = (cur as u8) - b'a';
- stack.push(vars.dyn[idx as usize].clone());
+ stack.push(vars.dyn_va[idx as usize].clone());
} else {
return Err("bad variable name in %g".to_string());
}
//! ncurses-compatible compiled terminfo format parsing (term(5))
use std::collections::HashMap;
-use std::io::prelude::*;
use std::io;
+use std::io::prelude::*;
use super::super::TermInfo;
// These are the orders ncurses uses in its compiled format (as of 5.9). Not sure if portable.
#[rustfmt_skip]
-pub static boolfnames: &'static[&'static str] = &["auto_left_margin", "auto_right_margin",
+pub static boolfnames: &[&str] = &["auto_left_margin", "auto_right_margin",
"no_esc_ctlc", "ceol_standout_glitch", "eat_newline_glitch", "erase_overstrike", "generic_type",
"hard_copy", "has_meta_key", "has_status_line", "insert_null_glitch", "memory_above",
"memory_below", "move_insert_mode", "move_standout_mode", "over_strike", "status_line_esc_ok",
"return_does_clr_eol"];
#[rustfmt_skip]
-pub static boolnames: &'static[&'static str] = &["bw", "am", "xsb", "xhp", "xenl", "eo",
+pub static boolnames: &[&str] = &["bw", "am", "xsb", "xhp", "xenl", "eo",
"gn", "hc", "km", "hs", "in", "db", "da", "mir", "msgr", "os", "eslok", "xt", "hz", "ul", "xon",
"nxon", "mc5i", "chts", "nrrmc", "npc", "ndscr", "ccc", "bce", "hls", "xhpa", "crxm", "daisy",
"xvpa", "sam", "cpix", "lpix", "OTbs", "OTns", "OTnc", "OTMT", "OTNL", "OTpt", "OTxr"];
#[rustfmt_skip]
-pub static numfnames: &'static[&'static str] = &[ "columns", "init_tabs", "lines",
+pub static numfnames: &[&str] = &[ "columns", "init_tabs", "lines",
"lines_of_memory", "magic_cookie_glitch", "padding_baud_rate", "virtual_terminal",
"width_status_line", "num_labels", "label_height", "label_width", "max_attributes",
"maximum_windows", "max_colors", "max_pairs", "no_color_video", "buffer_capacity",
"new_line_delay", "backspace_delay", "horizontal_tab_delay", "number_of_function_keys"];
#[rustfmt_skip]
-pub static numnames: &'static[&'static str] = &[ "cols", "it", "lines", "lm", "xmc", "pb",
+pub static numnames: &[&str] = &[ "cols", "it", "lines", "lm", "xmc", "pb",
"vt", "wsl", "nlab", "lh", "lw", "ma", "wnum", "colors", "pairs", "ncv", "bufsz", "spinv",
"spinh", "maddr", "mjump", "mcs", "mls", "npins", "orc", "orl", "orhi", "orvi", "cps", "widcs",
"btns", "bitwin", "bitype", "UTug", "OTdC", "OTdN", "OTdB", "OTdT", "OTkn"];
#[rustfmt_skip]
-pub static stringfnames: &'static[&'static str] = &[ "back_tab", "bell", "carriage_return",
+pub static stringfnames: &[&str] = &[ "back_tab", "bell", "carriage_return",
"change_scroll_region", "clear_all_tabs", "clear_screen", "clr_eol", "clr_eos",
"column_address", "command_character", "cursor_address", "cursor_down", "cursor_home",
"cursor_invisible", "cursor_left", "cursor_mem_address", "cursor_normal", "cursor_right",
"acs_plus", "memory_lock", "memory_unlock", "box_chars_1"];
#[rustfmt_skip]
-pub static stringnames: &'static[&'static str] = &[ "cbt", "_", "cr", "csr", "tbc", "clear",
+pub static stringnames: &[&str] = &[ "cbt", "_", "cr", "csr", "tbc", "clear",
"_", "_", "hpa", "cmdch", "cup", "cud1", "home", "civis", "cub1", "mrcup", "cnorm", "cuf1",
"ll", "cuu1", "cvvis", "dch1", "dl1", "dsl", "hd", "smacs", "blink", "bold", "smcup", "smdc",
"dim", "smir", "invis", "prot", "rev", "smso", "smul", "ech", "rmacs", "sgr0", "rmcup", "rmdc",
use std::io;
use std::io::prelude::*;
-use Attr;
-use color;
-use Terminal;
+use crate::Attr;
+use crate::color;
+use crate::Terminal;
/// A Terminal implementation that uses the Win32 Console API.
pub struct WinConsole<T> {
#![deny(rust_2018_idioms)]
#![feature(link_cfg)]
+#![feature(nll)]
#![feature(staged_api)]
#![feature(unwind_attributes)]
#![feature(static_nobundle)]
-include ../tools.mk
+# ignore-macos
+#
# This hits an assertion in the linker on older versions of osx apparently
-ifeq ($(shell uname),Darwin)
-all:
- echo ignored
-else
+
all: $(call DYLIB,cfoo)
$(RUSTC) foo.rs -C prefer-dynamic
$(RUSTC) bar.rs
$(call RUN,bar)
$(call REMOVE_DYLIBS,cfoo)
$(call FAIL,bar)
-endif
-include ../tools.mk
+# ignore-macos
+#
+# This hits an assertion in the linker on older versions of osx apparently
+
# This overrides the LD_LIBRARY_PATH for RUN
TARGET_RPATH_DIR:=$(TARGET_RPATH_DIR):$(TMPDIR)
-# This hits an assertion in the linker on older versions of osx apparently
-ifeq ($(shell uname),Darwin)
-all:
- echo ignored
-else
all: $(call DYLIB,cfoo)
$(RUSTC) foo.rs
$(RUSTC) bar.rs
$(call RUN,bar)
$(call REMOVE_DYLIBS,cfoo)
$(call FAIL,bar)
-endif
-include ../tools.mk
-# FIXME: ignore freebsd
-ifneq ($(shell uname),FreeBSD)
+# ignore-freebsd
+# FIXME
+
all:
$(RUSTC) foo.rs
$(CC) bar.c $(call STATICLIB,foo) $(call OUT_EXE,bar) \
$(call RUN,bar)
rm $(call STATICLIB,foo)
$(call RUN,bar)
-
-else
-all:
-
-endif
-include ../tools.mk
+# ignore-windows
# FIXME: The __rdl_ and __rust_ symbol still remains, no matter using MSVC or GNU
# See https://github.com/rust-lang/rust/pull/46207#issuecomment-347561753
-ifdef IS_WINDOWS
-all:
- true
-else
+
all:
$(RUSTC) foo.rs
nm -g "$(call DYLIB,foo)" | $(CGREP) -v __rdl_ __rde_ __rg_ __rust_
-endif
-include ../tools.mk
-ifneq (,$(findstring MINGW,$(UNAME)))
-ifndef IS_MSVC
+# only-mingw
+
all:
$(CXX) foo.cpp -c -o $(TMPDIR)/foo.o
$(AR) crus $(TMPDIR)/libfoo.a $(TMPDIR)/foo.o
$(RUSTC) foo.rs -lfoo -lstdc++
$(call RUN,foo)
-else
-all:
-
-endif
-else
-all:
-
-endif
-include ../tools.mk
-# FIXME: ignore freebsd/windows
-# (windows: see `../dep-info/Makefile`)
-ifneq ($(shell uname),FreeBSD)
-ifndef IS_WINDOWS
+# ignore-windows
+# ignore-freebsd
+# FIXME: (windows: see `../dep-info/Makefile`)
+
all:
cp lib.rs $(TMPDIR)/
cp 'foo foo.rs' $(TMPDIR)/
pwd
$(MAKE) -drf Makefile.foo
rm $(TMPDIR)/done && exit 1 || exit 0
-else
-all:
-
-endif
-
-else
-all:
-
-endif
-include ../tools.mk
-# FIXME: ignore freebsd/windows
-# on windows `rustc --dep-info` produces Makefile dependency with
+# ignore-windows
+# ignore-freebsd
+# FIXME: on windows `rustc --dep-info` produces Makefile dependency with
# windows native paths (e.g. `c:\path\to\libfoo.a`)
# but msys make seems to fail to recognize such paths, so test fails.
-ifneq ($(shell uname),FreeBSD)
-ifndef IS_WINDOWS
+
all:
cp *.rs $(TMPDIR)
$(RUSTC) --emit dep-info,link --crate-type=lib $(TMPDIR)/lib.rs
rm $(TMPDIR)/bar.rs
cp $(TMPDIR)/lib2.rs $(TMPDIR)/lib.rs
$(MAKE) -drf Makefile.foo
-else
-all:
-
-endif
-
-else
-all:
-
-endif
-include ../tools.mk
+# ignore-windows
+# ignore-macos
+# min-llvm-version 6.0
+#
# This feature only works when the output object format is ELF so we ignore
# macOS and Windows
-ifdef IS_WINDOWS
-# Do nothing on Windows.
-all:
- exit 0
-else ifneq (,$(filter $(TARGET),i686-apple-darwin x86_64-apple-darwin))
-# Do nothing on macOS.
-all:
- exit 0
-else
+
# check that the .stack_sizes section is generated
-# this test requires LLVM >= 6.0.0
-vers = $(shell $(RUSTC) -Vv)
-ifneq (,$(findstring LLVM version: 3,$(vers)))
-all:
- exit 0
-else ifneq (,$(findstring LLVM version: 4,$(vers)))
-all:
- exit 0
-else ifneq (,$(findstring LLVM version: 5,$(vers)))
-all:
- exit 0
-else
all:
$(RUSTC) -C opt-level=3 -Z emit-stack-sizes --emit=obj foo.rs
size -A $(TMPDIR)/foo.o | $(CGREP) .stack_sizes
-endif
-endif
-include ../tools.mk
+# ignore-windows
+# ignore-macos
+
# Test for #39529.
# `-z text` causes ld to error if there are any non-PIC sections
-ifeq ($(UNAME),Darwin)
-all:
-else ifdef IS_WINDOWS
-all:
-else
all:
$(RUSTC) hello.rs -C link-args=-Wl,-z,text
-endif
-include ../tools.mk
-# FIXME: ignore freebsd/windows
-# on windows `rustc --dep-info` produces Makefile dependency with
+# ignore-windows
+# ignore-freebsd
+# FIXME: on windows `rustc --dep-info` produces Makefile dependency with
# windows native paths (e.g. `c:\path\to\libfoo.a`)
# but msys make seems to fail to recognize such paths, so test fails.
-ifneq ($(shell uname),FreeBSD)
-ifndef IS_WINDOWS
+
all:
$(RUSTC) --emit dep-info main.rs
$(CGREP) "input.txt" "input.bin" "input.md" < $(TMPDIR)/main.d
-else
-all:
-
-endif
-
-else
-all:
-
-endif
-include ../tools.mk
-ifndef IS_WINDOWS
-# The assembly for exit-unreachable.rs should be shorter because it's missing
-# (at minimum) a return instruction.
+# ignore-windows
+#
+# Because of Windows exception handling, the code is not necessarily any shorter.
+# https://github.com/llvm-mirror/llvm/commit/64b2297786f7fd6f5fa24cdd4db0298fbf211466
all:
$(RUSTC) -O --emit asm exit-ret.rs
$(RUSTC) -O --emit asm exit-unreachable.rs
test `wc -l < $(TMPDIR)/exit-unreachable.s` -lt `wc -l < $(TMPDIR)/exit-ret.s`
-else
-# Because of Windows exception handling, the code is not necessarily any shorter.
-# https://github.com/llvm-mirror/llvm/commit/64b2297786f7fd6f5fa24cdd4db0298fbf211466
-all:
-endif
-include ../tools.mk
-ifeq ($(UNAME),Linux)
+# only-linux
+
all:
$(RUSTC) foo.rs
$(CC) foo.c -lfoo -L $(TMPDIR) -Wl,--gc-sections -lpthread -ldl -o $(TMPDIR)/foo
$(call RUN,foo)
$(CC) foo.c -lfoo -L $(TMPDIR) -Wl,--gc-sections -lpthread -ldl -pie -fPIC -o $(TMPDIR)/foo
$(call RUN,foo)
-else
-all:
-endif
-include ../tools.mk
+# ignore-windows
+
OUT := $(TMPDIR)/out
-ifndef IS_WINDOWS
all: time
time: libc
libc:
mkdir -p $(OUT)/libc
$(RUSTC) in/libc/lib.rs --crate-name=libc -Cmetadata=foo -o $(OUT)/libc/liblibc.rlib
-else
-all:
-endif
-include ../tools.mk
-ifeq (musl,$(findstring musl,$(TARGET)))
-all: skip
-else
-all: test
-endif
+# ignore-musl
-test: foo
+all: foo
$(call RUN,foo)
-skip:
- echo "expected failure"
-
foo: foo.rs $(call NATIVE_STATICLIB,foo)
$(RUSTC) $< -lfoo $(EXTRACXXFLAGS)
$(TMPDIR)/libfoo.o: foo.cpp
$(call COMPILE_OBJ_CXX,$@,$<)
-
-.PHONY: all test skip
-include ../tools.mk
-ifeq ($(findstring stage1,$(RUST_BUILD_STAGE)),stage1)
-# ignore stage1
-all:
+# ignore-stage1
-else
all:
$(RUSTC) a.rs && $(RUSTC) b.rs
$(BARE_RUSTC) c.rs -L dependency=$(TMPDIR) --extern b=$(TMPDIR)/libb.rlib \
--out-dir=$(TMPDIR)
-endif
-include ../tools.mk
-ifeq ($(findstring stage1,$(RUST_BUILD_STAGE)),stage1)
-# ignore stage1
-all:
+# ignore-stage1
-else
all:
$(RUSTC) a.rs && $(RUSTC) b.rs && $(RUSTC) c.rs
-endif
-include ../tools.mk
-ifdef IS_WINDOWS
-# Do nothing on MSVC.
-all:
- exit 0
-else
+# ignore-windows
+
all:
$(RUSTC) --emit=obj app.rs
nm $(TMPDIR)/app.o | $(CGREP) rust_begin_unwind
nm $(TMPDIR)/app.o | $(CGREP) rust_eh_personality
nm $(TMPDIR)/app.o | $(CGREP) rust_oom
-endif
-include ../tools.mk
-ifeq ($(if $(IS_WINDOWS),$(IS_MSVC),no),)
+# only-mingw
all: empty.rs
cp -r $(shell cygpath -u $(shell $(RUSTC) --print sysroot)) $(TMPDIR)/sysroot
mkdir -p $(TMPDIR)/obj
mv $(TMPDIR)/sysroot/lib/rustlib/$(TARGET)/lib/crt2.o $(TMPDIR)/obj/crt2.o
$(RUSTC) --target $(TARGET) --sysroot $(TMPDIR)/sysroot -L$(TMPDIR)/obj -Z print-link-args empty.rs | $(CGREP) 'obj\\crt2.o'
-
-else
-
-all:
-
-endif
-include ../tools.mk
-ifdef IS_WINDOWS
-all:
-else
+# ignore-windows
NAME := $(shell $(RUSTC) --print file-names foo.rs)
$(RUSTC) foo.rs -o $(TMPDIR)/outdir/$(NAME)
ln -nsf outdir/$(NAME) $(TMPDIR)
RUST_LOG=rustc_metadata::loader $(RUSTC) bar.rs
-endif
# Make sure we don't ICE if the linker prints a non-UTF-8 error message.
-# Ignore Windows and Apple
-
+# ignore-windows
+#
# This does not work in its current form on windows, possibly due to
# gcc bugs or something about valid Windows paths. See issue #29151
# for more information.
-ifndef IS_WINDOWS
+# ignore-macos
+#
# This also does not work on Apple APFS due to the filesystem requiring
# valid UTF-8 paths.
-ifneq ($(shell uname),Darwin)
# The zzz it to allow humans to tab complete or glob this thing.
bad_dir := $(TMPDIR)/zzz$$'\xff'
mkdir $(bad_dir)
mv $(TMPDIR)/liblibrary.a $(bad_dir)
LIBRARY_PATH=$(bad_dir) $(RUSTC) exec.rs 2>&1 | $(CGREP) this_symbol_not_defined
-else
-all:
-
-endif
-
-else
-all:
-
-endif
-include ../tools.mk
+# only-linux
+
# This tests ensure that global variables respect the target minimum alignment.
# The three bools `STATIC_BOOL`, `STATIC_MUT_BOOL`, and `CONST_BOOL` all have
# type-alignment of 1, but some targets require greater global alignment.
LL = $(TMPDIR)/min_global_align.ll
all:
-ifeq ($(UNAME),Linux)
# Most targets are happy with default alignment -- take i686 for example.
ifeq ($(filter x86,$(LLVM_COMPONENTS)),x86)
$(RUSTC) --target=i686-unknown-linux-gnu --emit=llvm-ir $(SRC)
$(RUSTC) --target=s390x-unknown-linux-gnu --emit=llvm-ir $(SRC)
[ "$$(grep -c 'align 2' "$(LL)")" -eq "3" ]
endif
-endif
-include ../tools.mk
+# only-linux
+# only-x86_64
+
all:
-ifeq ($(TARGET),x86_64-unknown-linux-gnu)
$(RUSTC) hello.rs -C no_integrated_as
$(call RUN,hello)
-endif
-include ../tools.mk
+# ignore-windows
+
all:
ifeq ($(PROFILER_SUPPORT),1)
-ifndef IS_WINDOWS
$(RUSTC) -Copt-level=3 -Clto=fat -Z pgo-gen="$(TMPDIR)/test.profraw" test.rs
$(call RUN,test) || exit 1
[ -e "$(TMPDIR)/test.profraw" ] || (echo "No .profraw file"; exit 1)
endif
-endif
-include ../tools.mk
+# ignore-windows
+
all:
ifeq ($(PROFILER_SUPPORT),1)
-ifndef IS_WINDOWS
$(RUSTC) -g -Z pgo-gen="$(TMPDIR)/test.profraw" test.rs
$(call RUN,test) || exit 1
[ -e "$(TMPDIR)/test.profraw" ] || (echo "No .profraw file"; exit 1)
endif
-endif
-include ../tools.mk
-ifdef IS_WINDOWS
-# ignore windows
-RUSTC_FLAGS =
-else
+
+# ignore-windows
+
# Notice the space in the end, this emulates the output of pkg-config
RUSTC_FLAGS = -C link-args="-lc "
-endif
all:
$(RUSTC) $(RUSTC_FLAGS) empty.rs
-include ../tools.mk
+# only-linux
+#
# This tests the different -Zrelro-level values, and makes sure that they work properly.
all:
-ifeq ($(UNAME),Linux)
# Ensure that binaries built with the full relro level links them with both
# RELRO and BIND_NOW for doing eager symbol resolving.
$(RUSTC) -Zrelro-level=full hello.rs
# enabled by default.
$(RUSTC) -Zrelro-level=off hello.rs
! readelf -l $(TMPDIR)/hello | grep -q GNU_RELRO
-endif
# while generating files. Ideally this would be a rustdoc-ui test, so we could
# verify the error message as well.
-OUTPUT_DIR := "$(TMPDIR)/rustdoc-io-error"
+# ignore-windows
+# The test uses `chmod`.
-# Ignore Windows: the test uses `chmod`.
-ifndef IS_WINDOWS
+OUTPUT_DIR := "$(TMPDIR)/rustdoc-io-error"
# This test operates by creating a temporary directory and modifying its
# permissions so that it is not writable. We have to take special care to set
-$(shell $(RUSTDOC) -o $(OUTPUT_DIR) foo.rs)
chmod u+w $(OUTPUT_DIR)
exit $($(.SHELLSTATUS) -eq 1)
-
-else
-all:
-
-endif
-include ../tools.mk
+# only-linux
+# only-x86_64
+# ignore-test
# FIXME(#46126) ThinLTO for libstd broke this test
-ifeq (1,0)
+
all:
-ifeq ($(TARGET),x86_64-unknown-linux-gnu)
ifdef SANITIZER_SUPPORT
$(RUSTC) -C opt-level=1 -g -Z sanitizer=leak -Z print-link-args leak.rs | $(CGREP) librustc_lsan
$(TMPDIR)/leak 2>&1 | $(CGREP) 'detected memory leaks'
endif
-endif
-
-else
-all:
-endif
-
-include ../tools.mk
+# only-linux
+# only-x86_64
+
all:
-ifeq ($(TARGET),x86_64-unknown-linux-gnu)
ifdef SANITIZER_SUPPORT
$(RUSTC) -g -Z sanitizer=memory -Z print-link-args uninit.rs | $(CGREP) librustc_msan
$(TMPDIR)/uninit 2>&1 | $(CGREP) use-of-uninitialized-value
endif
-endif
-
include ../tools.mk
-ifdef IS_WINDOWS
-# Do nothing on MSVC.
+# ignore-windows
+#
# On MINGW the --version-script, --dynamic-list, and --retain-symbol args don't
# seem to work reliably.
-all:
- exit 0
-else
NM=nm -D
CDYLIB_NAME=liba_cdylib.so
# Check that an executable does not export any dynamic symbols
[ "$$($(NM) $(TMPDIR)/$(EXE_NAME) | grep -c public_c_function_from_rlib)" -eq "0" ]
[ "$$($(NM) $(TMPDIR)/$(EXE_NAME) | grep -c public_rust_function_from_exe)" -eq "0" ]
-endif
-include ../tools.mk
-# ignore windows: `ln` is actually `cp` on msys.
-ifndef IS_WINDOWS
+# ignore-windows
+# `ln` is actually `cp` on msys.
all:
$(RUSTC) foo.rs
ln -nsf $(TMPDIR)/libfoo.rlib $(TMPDIR)/other
$(RUSTC) bar.rs -L $(TMPDIR)
$(RUSTC) baz.rs --extern foo=$(TMPDIR)/other/libfoo.rlib -L $(TMPDIR)
-
-else
-all:
-
-endif
-include ../tools.mk
-# ignore windows: `ln` is actually `cp` on msys.
-ifndef IS_WINDOWS
+# ignore-windows
+# `ln` is actually `cp` on msys.
all:
$(RUSTC) foo.rs -C prefer-dynamic
mkdir -p $(TMPDIR)/other
ln -nsf $(TMPDIR)/$(call DYLIB_GLOB,foo) $(TMPDIR)/other
$(RUSTC) bar.rs -L $(TMPDIR)/other
-
-else
-all:
-
-endif
-include ../tools.mk
-# ignore windows: `ln` is actually `cp` on msys.
-ifndef IS_WINDOWS
+# ignore-windows
+# `ln` is actually `cp` on msys.
all:
$(RUSTC) foo.rs --crate-type=rlib -o $(TMPDIR)/foo.xxx
ln -nsf $(TMPDIR)/foo.xxx $(TMPDIR)/libfoo.rlib
$(RUSTC) bar.rs -L $(TMPDIR)
-
-else
-all:
-
-endif
-include ../tools.mk
+# only-linux
+# only-x86_64
+#
# I *really* don't want to deal with a cross-platform way to compare file sizes,
# tests in `make` sort of are awful
-ifeq ($(TARGET),x86_64-unknown-linux-gnu)
+
all: $(TMPDIR)/out.log
# Make sure no warnings about "unknown CPU `native`" were emitted
if [ "$$(wc -c $(TMPDIR)/out.log | cut -d' ' -f 1)" = "0" ]; then \
else \
exit 1; \
fi
-else
-all: $(TMPDIR)/out.log
-endif
$(TMPDIR)/out.log:
-include ../tools.mk
-SKIP_OS := 'FreeBSD OpenBSD Bitrig SunOS'
-
-ifneq ($(UNAME),$(findstring $(UNAME),$(SKIP_OS)))
+# ignore-freebsd
+# ignore-openbsd
+# ignore-bitrig
+# ignore-sunos
HOST := $(shell $(RUSTC) -vV | grep 'host:' | sed 's/host: //')
ifeq ($(findstring i686,$(HOST)),i686)
$(RUSTC) foo.rs -C extra-filename=-host
$(RUSTC) bar.rs -C extra-filename=-targ --target $(TARGET)
$(RUSTC) baz.rs --extern a=$(TMPDIR)/liba-targ.rlib --target $(TARGET)
-else
-# FreeBSD, OpenBSD, and Bitrig support only x86_64 architecture for now
-all:
-endif
-include ../tools.mk
-ifdef IS_WINDOWS
-# Do nothing on MSVC.
-all:
- exit 0
-else
+# ignore-windows
+
all:
$(RUSTC) -C opt-level=3 --emit=obj used.rs
nm $(TMPDIR)/used.o | $(CGREP) FOO
-endif
-include ../tools.mk
-ifdef IS_WINDOWS
+# only-windows
all:
$(RUSTC) -o "$(TMPDIR)/hopefullydoesntexist bar.exe" hello.rs
$(RUSTC) spawn.rs
$(TMPDIR)/spawn.exe
-
-else
-
-all:
-
-endif
# - thumbv7em-none-eabihf (Bare Cortex-M4F, M7F, FPU, hardfloat)
# - thumbv7m-none-eabi (Bare Cortex-M3)
-# See https://stackoverflow.com/questions/7656425/makefile-ifeq-logical-or
-ifneq (,$(filter $(TARGET),thumbv6m-none-eabi thumbv7em-none-eabi thumbv7em-none-eabihf thumbv7m-none-eabi))
+# only-thumbv6m-none-eabi
+# only-thumbv7em-none-eabi
+# only-thumbv7em-none-eabihf
+# only-thumbv7m-none-eabi
# For cargo setting
RUSTC := $(RUSTC_ORIGINAL)
# These come from the top-level Rust workspace, that this crate is not a
# member of, but Cargo tries to load the workspace `Cargo.toml` anyway.
cd $(WORK_DIR) && cd $(CRATE) && env RUSTC_BOOTSTRAP=1 $(CARGO) build --target $(TARGET) -v
-else
-
-all:
-
-endif
-include ../../run-make-fulldeps/tools.mk
+# only-thumbv7m-none-eabi
+# only-thumbv6m-none-eabi
+
# How to run this
# $ ./x.py clean
# $ ./x.py test --target thumbv7m-none-eabi src/test/run-make
-ifneq (,$(filter $(TARGET),thumbv6m-none-eabi thumbv7m-none-eabi))
-
# For cargo setting
export RUSTC := $(RUSTC_ORIGINAL)
export LD_LIBRARY_PATH := $(HOST_RPATH_DIR)
all:
bash script.sh
-else
-all:
-endif
-include ../../run-make-fulldeps/tools.mk
-ifeq ($(TARGET),wasm32-unknown-unknown)
+# only-wasm32
+
all:
$(RUSTC) foo.rs --target wasm32-unknown-unknown
$(RUSTC) bar.rs -C lto -O --target wasm32-unknown-unknown
$(NODE) foo.js $(TMPDIR)/bar.wasm
-else
-all:
-endif
-include ../../run-make-fulldeps/tools.mk
-ifeq ($(TARGET),wasm32-unknown-unknown)
+# only-wasm32
+
all:
$(RUSTC) foo.rs -O --target wasm32-unknown-unknown
$(NODE) foo.js $(TMPDIR)/foo.wasm
-else
-all:
-endif
-include ../../run-make-fulldeps/tools.mk
-ifeq ($(TARGET),wasm32-unknown-unknown)
+# only-wasm32
+
all:
$(RUSTC) bar.rs --target wasm32-unknown-unknown
$(RUSTC) foo.rs --target wasm32-unknown-unknown
$(NODE) verify.js $(TMPDIR)/foo.wasm
$(RUSTC) foo.rs --target wasm32-unknown-unknown -C lto
$(NODE) verify.js $(TMPDIR)/foo.wasm
-else
-all:
-endif
-
-include ../../run-make-fulldeps/tools.mk
-ifeq ($(TARGET),wasm32-unknown-unknown)
+# only-wasm32
+
all:
$(RUSTC) foo.rs --target wasm32-unknown-unknown
$(RUSTC) bar.rs -C lto -O --target wasm32-unknown-unknown
$(NODE) foo.js $(TMPDIR)/bar.wasm
-else
-all:
-endif
-include ../../run-make-fulldeps/tools.mk
-ifeq ($(TARGET),wasm32-unknown-unknown)
+# only-wasm32
+
all:
$(RUSTC) foo.rs -C lto -O --target wasm32-unknown-unknown --cfg a
wc -c < $(TMPDIR)/foo.wasm
$(RUSTC) foo.rs -C lto -O --target wasm32-unknown-unknown --cfg d
wc -c < $(TMPDIR)/foo.wasm
[ "`wc -c < $(TMPDIR)/foo.wasm`" -lt "5120" ]
-else
-all:
-endif
-
-include ../../run-make-fulldeps/tools.mk
-ifeq ($(TARGET),wasm32-unknown-unknown)
+# only-wasm32
+
all:
$(RUSTC) foo.rs --target wasm32-unknown-unknown
$(NODE) verify-exported-symbols.js $(TMPDIR)/foo.wasm
$(NODE) verify-exported-symbols.js $(TMPDIR)/bar.wasm
$(RUSTC) bar.rs --target wasm32-unknown-unknown -O
$(NODE) verify-exported-symbols.js $(TMPDIR)/bar.wasm
-else
-all:
-endif
-
-include ../../run-make-fulldeps/tools.mk
-ifeq ($(TARGET),wasm32-unknown-unknown)
+# only-wasm32
+
all:
$(RUSTC) foo.rs --target wasm32-unknown-unknown
$(NODE) verify-no-imports.js $(TMPDIR)/foo.wasm
$(NODE) verify-no-imports.js $(TMPDIR)/foo.wasm
$(RUSTC) foo.rs --target wasm32-unknown-unknown -O -C lto
$(NODE) verify-no-imports.js $(TMPDIR)/foo.wasm
-else
-all:
-endif
-
NormalTT {
expander: Box::new(Expander { args: args, }),
def_info: None,
- allow_internal_unstable: false,
+ allow_internal_unstable: None,
allow_internal_unsafe: false,
local_inner_macros: false,
unstable_feature: None,
--- /dev/null
+// ignore-emscripten no i128 support
+#![feature(const_saturating_int_methods)]
+
+const INT_U32_NO: u32 = (42 as u32).saturating_add(2);
+const INT_U32: u32 = u32::max_value().saturating_add(1);
+const INT_U128: u128 = u128::max_value().saturating_add(1);
+const INT_I128: i128 = i128::max_value().saturating_add(1);
+const INT_I128_NEG: i128 = i128::min_value().saturating_add(-1);
+
+const INT_U32_NO_SUB: u32 = (42 as u32).saturating_sub(2);
+const INT_U32_SUB: u32 = (1 as u32).saturating_sub(2);
+const INT_I32_NO_SUB: i32 = (-42 as i32).saturating_sub(2);
+const INT_I32_NEG_SUB: i32 = i32::min_value().saturating_sub(1);
+const INT_I32_POS_SUB: i32 = i32::max_value().saturating_sub(-1);
+const INT_U128_SUB: u128 = (0 as u128).saturating_sub(1);
+const INT_I128_NEG_SUB: i128 = i128::min_value().saturating_sub(1);
+const INT_I128_POS_SUB: i128 = i128::max_value().saturating_sub(-1);
+
+fn main() {
+ assert_eq!(INT_U32_NO, 44);
+ assert_eq!(INT_U32, u32::max_value());
+ assert_eq!(INT_U128, u128::max_value());
+ assert_eq!(INT_I128, i128::max_value());
+ assert_eq!(INT_I128_NEG, i128::min_value());
+
+ assert_eq!(INT_U32_NO_SUB, 40);
+ assert_eq!(INT_U32_SUB, 0);
+ assert_eq!(INT_I32_NO_SUB, -44);
+ assert_eq!(INT_I32_NEG_SUB, i32::min_value());
+ assert_eq!(INT_I32_POS_SUB, i32::max_value());
+ assert_eq!(INT_U128_SUB, 0);
+ assert_eq!(INT_I128_NEG_SUB, i128::min_value());
+ assert_eq!(INT_I128_POS_SUB, i128::max_value());
+}
// edition:2018
-// compile-flags:-Z unstable-options
-
-// FIXME: once `--edition` is stable in rustdoc, remove that `compile-flags` directive
#![feature(async_await, futures_api)]
-// @has async_fn/struct.S.html
-// @has - '//code' 'pub async fn f()'
-pub struct S;
+// @has async_fn/fn.foo.html '//pre[@class="rust fn"]' 'pub async fn foo() -> Option<Foo>'
+pub async fn foo() -> Option<Foo> {
+ None
+}
+
+// @has async_fn/fn.bar.html '//pre[@class="rust fn"]' 'pub async fn bar(a: i32, b: i32) -> i32'
+pub async fn bar(a: i32, b: i32) -> i32 {
+ 0
+}
+
+// @has async_fn/fn.baz.html '//pre[@class="rust fn"]' 'pub async fn baz<T>(a: T) -> T'
+pub async fn baz<T>(a: T) -> T {
+ a
+}
+
+trait Bar {}
+
+impl Bar for () {}
+
+// @has async_fn/fn.quux.html '//pre[@class="rust fn"]' 'pub async fn quux() -> impl Bar'
+pub async fn quux() -> impl Bar {
+ ()
+}
+
+// @has async_fn/struct.Foo.html
+// @matches - '//code' 'pub async fn f\(\)$'
+pub struct Foo;
-impl S {
+impl Foo {
pub async fn f() {}
}
--- /dev/null
+#![feature(trait_alias)]
+
+#![crate_name = "foo"]
+
+use std::fmt::Debug;
+
+// @has foo/all.html '//a[@href="traitalias.CopyAlias.html"]' 'CopyAlias'
+// @has foo/all.html '//a[@href="traitalias.Alias2.html"]' 'Alias2'
+// @has foo/all.html '//a[@href="traitalias.Foo.html"]' 'Foo'
+
+// @has foo/index.html '//h2[@id="trait-aliases"]' 'Trait aliases'
+// @has foo/index.html '//a[@class="traitalias"]' 'CopyAlias'
+// @has foo/index.html '//a[@class="traitalias"]' 'Alias2'
+// @has foo/index.html '//a[@class="traitalias"]' 'Foo'
+
+// @has foo/traitalias.CopyAlias.html '//section[@id="main"]/pre' 'trait CopyAlias = Copy;'
+pub trait CopyAlias = Copy;
+// @has foo/traitalias.Alias2.html '//section[@id="main"]/pre' 'trait Alias2 = Copy + Debug;'
+pub trait Alias2 = Copy + Debug;
+// @has foo/traitalias.Foo.html '//section[@id="main"]/pre' 'trait Foo<T> = Into<T> + Debug;'
+pub trait Foo<T> = Into<T> + Debug;
--> $DIR/bad-lint-cap2.rs:6:5
|
LL | use std::option; //~ ERROR
- | ^^^^^^^^^^^
+ | ----^^^^^^^^^^^- help: remove the whole `use` item
|
note: lint level defined here
--> $DIR/bad-lint-cap2.rs:4:9
--> $DIR/bad-lint-cap3.rs:7:5
|
LL | use std::option; //~ WARN
- | ^^^^^^^^^^^
+ | ----^^^^^^^^^^^- help: remove the whole `use` item
|
note: lint level defined here
--> $DIR/bad-lint-cap3.rs:4:9
macro_rules! bar {
() => {
// more layers don't help:
- #[allow_internal_unstable] //~ ERROR allow_internal_unstable side-steps
+ #[allow_internal_unstable()] //~ ERROR allow_internal_unstable side-steps
macro_rules! baz {
() => {}
}
error[E0658]: allow_internal_unstable side-steps feature gating and stability checks
--> $DIR/feature-gate-allow-internal-unstable-nested-macro.rs:8:9
|
-LL | #[allow_internal_unstable] //~ ERROR allow_internal_unstable side-steps
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+LL | #[allow_internal_unstable()] //~ ERROR allow_internal_unstable side-steps
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
...
LL | bar!();
| ------- in this macro invocation
// checks that this attribute is caught on non-macro items.
// this needs a different test since this is done after expansion
-#[allow_internal_unstable] //~ ERROR allow_internal_unstable side-steps
+#[allow_internal_unstable()] //~ ERROR allow_internal_unstable side-steps
struct S;
fn main() {}
error[E0658]: allow_internal_unstable side-steps feature gating and stability checks
--> $DIR/feature-gate-allow-internal-unstable-struct.rs:4:1
|
-LL | #[allow_internal_unstable] //~ ERROR allow_internal_unstable side-steps
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+LL | #[allow_internal_unstable()] //~ ERROR allow_internal_unstable side-steps
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
= help: add #![feature(allow_internal_unstable)] to the crate attributes to enable
#![allow(unused_macros)]
-#[allow_internal_unstable] //~ ERROR allow_internal_unstable side-steps
+#[allow_internal_unstable()] //~ ERROR allow_internal_unstable side-steps
macro_rules! foo {
() => {}
}
error[E0658]: allow_internal_unstable side-steps feature gating and stability checks
--> $DIR/feature-gate-allow-internal-unstable.rs:3:1
|
-LL | #[allow_internal_unstable] //~ ERROR allow_internal_unstable side-steps
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+LL | #[allow_internal_unstable()] //~ ERROR allow_internal_unstable side-steps
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
= help: add #![feature(allow_internal_unstable)] to the crate attributes to enable
--> $DIR/unused.rs:7:24
|
LL | pub(super) use super::f; //~ ERROR unused
- | ^^^^^^^^
+ | ---------------^^^^^^^^- help: remove the whole `use` item
|
note: lint level defined here
--> $DIR/unused.rs:1:9
}
#[stable(feature = "stable", since = "1.0.0")]
-#[allow_internal_unstable]
+#[allow_internal_unstable(function)]
#[macro_export]
macro_rules! call_unstable_allow {
() => { $crate::unstable() }
}
#[stable(feature = "stable", since = "1.0.0")]
-#[allow_internal_unstable]
+#[allow_internal_unstable(struct_field)]
#[macro_export]
macro_rules! construct_unstable_allow {
($e: expr) => {
}
#[stable(feature = "stable", since = "1.0.0")]
-#[allow_internal_unstable]
+#[allow_internal_unstable(method)]
#[macro_export]
macro_rules! call_method_allow {
($e: expr) => { $e.method() }
}
#[stable(feature = "stable", since = "1.0.0")]
-#[allow_internal_unstable]
+#[allow_internal_unstable(struct_field, struct2_field)]
#[macro_export]
macro_rules! access_field_allow {
($e: expr) => { $e.x }
}
#[stable(feature = "stable", since = "1.0.0")]
-#[allow_internal_unstable]
+#[allow_internal_unstable()]
#[macro_export]
macro_rules! pass_through_allow {
($e: expr) => { $e }
}}
}
-#[allow_internal_unstable]
+#[allow_internal_unstable(function)]
macro_rules! bar {
($e: expr) => {{
foo!($e,
--> $DIR/issue-30730.rs:3:5
|
LL | use std::thread;
- | ^^^^^^^^^^^
+ | ----^^^^^^^^^^^- help: remove the whole `use` item
|
note: lint level defined here
--> $DIR/issue-30730.rs:2:9
--> $DIR/lint-directives-on-use-items-issue-10534.rs:12:9
|
LL | use a::x; //~ ERROR: unused import
- | ^^^^
+ | ----^^^^- help: remove the whole `use` item
|
note: lint level defined here
--> $DIR/lint-directives-on-use-items-issue-10534.rs:1:9
--> $DIR/lint-directives-on-use-items-issue-10534.rs:21:9
|
LL | use a::y; //~ ERROR: unused import
- | ^^^^
+ | ----^^^^- help: remove the whole `use` item
|
note: lint level defined here
--> $DIR/lint-directives-on-use-items-issue-10534.rs:20:12
use std::mem::*; // shouldn't get errors for not using
// everything imported
use std::fmt::{};
-//~^ ERROR unused import: `use std::fmt::{};`
+//~^ ERROR unused import: `std::fmt::{}`
// Should get errors for both 'Some' and 'None'
use std::option::Option::{Some, None};
-error: unused import: `use std::fmt::{};`
- --> $DIR/lint-unused-imports.rs:8:1
+error: unused import: `std::fmt::{}`
+ --> $DIR/lint-unused-imports.rs:8:5
|
LL | use std::fmt::{};
- | ^^^^^^^^^^^^^^^^^
+ | ----^^^^^^^^^^^^- help: remove the whole `use` item
|
note: lint level defined here
--> $DIR/lint-unused-imports.rs:1:9
--> $DIR/lint-unused-imports.rs:12:27
|
LL | use std::option::Option::{Some, None};
- | ^^^^ ^^^^
+ | --------------------------^^^^--^^^^-- help: remove the whole `use` item
error: unused import: `test::A`
--> $DIR/lint-unused-imports.rs:15:5
|
LL | use test::A; //~ ERROR unused import: `test::A`
- | ^^^^^^^
+ | ----^^^^^^^- help: remove the whole `use` item
error: unused import: `bar`
--> $DIR/lint-unused-imports.rs:24:18
|
LL | use test2::{foo, bar}; //~ ERROR unused import: `bar`
- | ^^^
+ | --^^^
+ | |
+ | help: remove the unused import
error: unused import: `foo::Square`
--> $DIR/lint-unused-imports.rs:52:13
|
LL | use foo::Square; //~ ERROR unused import: `foo::Square`
- | ^^^^^^^^^^^
+ | ----^^^^^^^^^^^- help: remove the whole `use` item
error: unused import: `self::g`
--> $DIR/lint-unused-imports.rs:68:9
|
LL | use self::g; //~ ERROR unused import: `self::g`
- | ^^^^^^^
+ | ----^^^^^^^- help: remove the whole `use` item
error: unused import: `test2::foo`
--> $DIR/lint-unused-imports.rs:77:9
|
LL | use test2::foo; //~ ERROR unused import: `test2::foo`
- | ^^^^^^^^^^
+ | ----^^^^^^^^^^- help: remove the whole `use` item
error: unused import: `test::B2`
--> $DIR/lint-unused-imports.rs:20:5
--> $DIR/lints-in-foreign-macros.rs:11:16
|
LL | () => {use std::string::ToString;} //~ WARN: unused import
- | ^^^^^^^^^^^^^^^^^^^^^
+ | ----^^^^^^^^^^^^^^^^^^^^^- help: remove the whole `use` item
...
LL | mod a { foo!(); }
| ------- in this macro invocation
--> $DIR/lints-in-foreign-macros.rs:16:18
|
LL | mod c { baz!(use std::string::ToString;); } //~ WARN: unused import
- | ^^^^^^^^^^^^^^^^^^^^^
+ | ----^^^^^^^^^^^^^^^^^^^^^- help: remove the whole `use` item
warning: unused import: `std::string::ToString`
--> $DIR/lints-in-foreign-macros.rs:17:19
|
LL | mod d { baz2!(use std::string::ToString;); } //~ WARN: unused import
- | ^^^^^^^^^^^^^^^^^^^^^
+ | ----^^^^^^^^^^^^^^^^^^^^^- help: remove the whole `use` item
warning: missing documentation for crate
--> $DIR/lints-in-foreign-macros.rs:4:1
--- /dev/null
+#![allow(warnings)]
+#![feature(nll)]
+
+fn main() {
+ let i = &3;
+
+ let f = |x: &i32| -> &i32 { x };
+ //~^ ERROR lifetime may not live long enough
+ let j = f(i);
+
+ let g = |x: &i32| { x };
+ //~^ ERROR lifetime may not live long enough
+ let k = g(i);
+}
--- /dev/null
+error: lifetime may not live long enough
+ --> $DIR/issue-58053.rs:7:33
+ |
+LL | let f = |x: &i32| -> &i32 { x };
+ | - ---- ^ returning this value requires that `'1` must outlive `'2`
+ | | |
+ | | return type of closure is &'2 i32
+ | let's call the lifetime of this reference `'1`
+
+error: lifetime may not live long enough
+ --> $DIR/issue-58053.rs:11:25
+ |
+LL | let g = |x: &i32| { x };
+ | - - ^ returning this value requires that `'1` must outlive `'2`
+ | | |
+ | | return type of closure is &'2 i32
+ | let's call the lifetime of this reference `'1`
+
+error: aborting due to 2 previous errors
+
--> $DIR/basic.rs:26:9
|
LL | use m::Tr1 as _; //~ WARN unused import
- | ^^^^^^^^^^^
+ | ----^^^^^^^^^^^- help: remove the whole `use` item
|
note: lint level defined here
--> $DIR/basic.rs:4:9
--> $DIR/basic.rs:27:9
|
LL | use S as _; //~ WARN unused import
- | ^^^^^^
+ | ----^^^^^^- help: remove the whole `use` item
--> $DIR/unused-2018.rs:6:9
|
LL | use core::any; //~ ERROR unused import: `core::any`
- | ^^^^^^^^^
+ | ----^^^^^^^^^- help: remove the whole `use` item
|
note: lint level defined here
--> $DIR/unused-2018.rs:3:9
--> $DIR/unused-2018.rs:10:9
|
LL | use core; //~ ERROR unused import: `core`
- | ^^^^
+ | ----^^^^- help: remove the whole `use` item
error: aborting due to 2 previous errors
LL | #![warn(unused)]
| ^^^^^^
= note: #[warn(unused_imports)] implied by #[warn(unused)]
+help: remove the unused imports
+ |
+LL | use std::cmp::{min};
+ | -- --
use foo::bar::baz::{*, *};
//~^ ERROR unused import: `*`
use foo::{};
- //~^ ERROR unused import: `use foo::{};`
+ //~^ ERROR unused import: `foo::{}`
fn main() {
let _: Bar;
--> $DIR/use-nested-groups-unused-imports.rs:16:11
|
LL | use foo::{Foo, bar::{baz::{}, foobar::*}, *};
- | ^^^ ^^^^^^^ ^^^^^^^^^ ^
+ | ----------^^^--------^^^^^^^--^^^^^^^^^---^-- help: remove the whole `use` item
|
note: lint level defined here
--> $DIR/use-nested-groups-unused-imports.rs:3:9
--> $DIR/use-nested-groups-unused-imports.rs:18:24
|
LL | use foo::bar::baz::{*, *};
- | ^
+ | --^
+ | |
+ | help: remove the unused import
-error: unused import: `use foo::{};`
- --> $DIR/use-nested-groups-unused-imports.rs:20:1
+error: unused import: `foo::{}`
+ --> $DIR/use-nested-groups-unused-imports.rs:20:5
|
LL | use foo::{};
- | ^^^^^^^^^^^^
+ | ----^^^^^^^- help: remove the whole `use` item
error: aborting due to 3 previous errors
-Subproject commit 4e74e2fc0908524d17735c768067117d3e84ee9c
+Subproject commit 865cb70106a6b1171a500ff68f93ab52eea56e72
("androideabi", "android"),
("bitrig", "bitrig"),
("cloudabi", "cloudabi"),
+ ("cuda", "cuda"),
("darwin", "macos"),
("dragonfly", "dragonfly"),
("emscripten", "emscripten"),
("l4re", "l4re"),
("linux", "linux"),
("mingw32", "windows"),
+ ("none", "none"),
("netbsd", "netbsd"),
("openbsd", "openbsd"),
("redox", "redox"),
+ ("sgx", "sgx"),
("solaris", "solaris"),
("win32", "windows"),
("windows", "windows"),
("armv7", "arm"),
("armv7s", "arm"),
("asmjs", "asmjs"),
+ ("cuda", "cuda"),
("hexagon", "hexagon"),
("i386", "x86"),
("i586", "x86"),
fn test_get_arch() {
assert_eq!("x86_64", get_arch("x86_64-unknown-linux-gnu"));
assert_eq!("x86_64", get_arch("amd64"));
+ assert_eq!("cuda", get_arch("nvptx64-nvidia-cuda"));
}
#[test]
assert!(matches_os("wasm32-unknown-unknown", "emscripten"));
assert!(matches_os("wasm32-unknown-unknown", "wasm32-bare"));
assert!(!matches_os("wasm32-unknown-unknown", "windows"));
+ assert!(matches_os("thumbv6m0-none-eabi", "none"));
+ assert!(matches_os("riscv32imc-unknown-none-elf", "none"));
+ assert!(matches_os("nvptx64-nvidia-cuda", "cuda"));
+ assert!(matches_os("x86_64-fortanix-unknown-sgx", "sgx"));
}
description = """
Hack for the compiler's own build system
"""
+edition = "2018"
[lib]
path = "lib.rs"
#![feature(no_core)]
#![no_core]
-
-extern crate core;
+#![deny(rust_2018_idioms)]
pub use core::*;
description = """
Hack for the compiler's own build system
"""
+edition = "2018"
[lib]
path = "lib.rs"
"src/libstd/sys_common/net.rs",
"src/libterm", // Not sure how to make this crate portable, but test crate needs it.
"src/libtest", // Probably should defer to unstable `std::sys` APIs.
+ "src/libstd/sync/mpsc", // some tests are only run on non-emscripten
// std testing crates, okay for now at least
"src/libcore/tests",