python x.py build src/libcore --stage 0
```
-You can explore the build system throught the various `--help` pages for each
+You can explore the build system through the various `--help` pages for each
subcommand. For example to learn more about a command you can run:
```
valopt_nosave target "${CFG_HOST}" "GNUs ./configure syntax LLVM target triples"
valopt_nosave mandir "${CFG_PREFIX}/share/man" "install man pages in PATH"
valopt_nosave docdir "${CFG_PREFIX}/share/doc/rust" "install documentation in PATH"
+valopt_nosave bindir "${CFG_PREFIX}/bin" "install binaries"
# On Windows this determines root of the subtree for target libraries.
# Host runtime libs always go to 'bin'.
CFG_PREFIX=${CFG_PREFIX%/}
CFG_MANDIR=${CFG_MANDIR%/}
CFG_DOCDIR=${CFG_DOCDIR%/}
+CFG_BINDIR=${CFG_BINDIR%/}
CFG_HOST="$(echo $CFG_HOST | tr ',' ' ')"
CFG_TARGET="$(echo $CFG_TARGET | tr ',' ' ')"
putvar CFG_NACL_CROSS_PATH
putvar CFG_MANDIR
putvar CFG_DOCDIR
+putvar CFG_BINDIR
putvar CFG_USING_LIBCPP
msg
[[package]]
name = "lzma-sys"
-version = "0.1.3"
+version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "lzma-sys 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "lzma-sys 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
"checksum lazy_static 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "3b37545ab726dd833ec6420aaba8231c5b320814b9029ad585555d2a03e94fbf"
"checksum libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)" = "88ee81885f9f04bff991e306fea7c1c60a5f0f9e409e99f6b40e3311a3363135"
"checksum log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)" = "5141eca02775a762cc6cd564d8d2c50f67c0ea3a372cbf1c51592b3e029e10ad"
-"checksum lzma-sys 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "c5eaaa53b35fa17482ee2c001b04242827b47ae0faba72663fee3dee32366248"
+"checksum lzma-sys 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "fedff6a5cbb24494ec6ee4784e9ac5c187161fede04c7767d49bf87544013afa"
"checksum mdbook 0.0.21 (registry+https://github.com/rust-lang/crates.io-index)" = "f1e2e9d848514dcfad4195788d0d42ae5153a477c191d75d5b84fab10f222fbd"
"checksum memchr 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1dbccc0e46f1ea47b9f17e6d67c5a96bd27030519c519c9c91327e31275a47b4"
"checksum miniz-sys 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "28eaee17666671fa872e567547e8428e83308ebe5808cdf6a0e28397dbe2c726"
// Fallback musl-root for all targets
pub musl_root: Option<PathBuf>,
pub prefix: Option<PathBuf>,
+ pub sysconfdir: Option<PathBuf>,
pub docdir: Option<PathBuf>,
+ pub bindir: Option<PathBuf>,
pub libdir: Option<PathBuf>,
pub libdir_relative: Option<PathBuf>,
pub mandir: Option<PathBuf>,
#[derive(RustcDecodable, Default, Clone)]
struct Install {
prefix: Option<String>,
- mandir: Option<String>,
+ sysconfdir: Option<String>,
docdir: Option<String>,
+ bindir: Option<String>,
libdir: Option<String>,
+ mandir: Option<String>,
}
/// TOML representation of how the LLVM build is configured.
if let Some(ref install) = toml.install {
config.prefix = install.prefix.clone().map(PathBuf::from);
- config.mandir = install.mandir.clone().map(PathBuf::from);
+ config.sysconfdir = install.sysconfdir.clone().map(PathBuf::from);
config.docdir = install.docdir.clone().map(PathBuf::from);
+ config.bindir = install.bindir.clone().map(PathBuf::from);
config.libdir = install.libdir.clone().map(PathBuf::from);
+ config.mandir = install.mandir.clone().map(PathBuf::from);
}
if let Some(ref llvm) = toml.llvm {
"CFG_PREFIX" => {
self.prefix = Some(PathBuf::from(value));
}
+ "CFG_SYSCONFDIR" => {
+ self.sysconfdir = Some(PathBuf::from(value));
+ }
"CFG_DOCDIR" => {
self.docdir = Some(PathBuf::from(value));
}
+ "CFG_BINDIR" => {
+ self.bindir = Some(PathBuf::from(value));
+ }
"CFG_LIBDIR" => {
self.libdir = Some(PathBuf::from(value));
}
# Instead of installing to /usr/local, install to this path instead.
#prefix = "/usr/local"
+# Where to install system configuration files
+# If this is a relative path, it will get installed in `prefix` above
+#sysconfdir = "/etc"
+
+# Where to install documentation in `prefix` above
+#docdir = "share/doc/rust"
+
+# Where to install binaries in `prefix` above
+#bindir = "bin"
+
# Where to install libraries in `prefix` above
#libdir = "lib"
# Where to install man pages in `prefix` above
#mandir = "share/man"
-# Where to install documentation in `prefix` above
-#docdir = "share/doc/rust"
-
# =============================================================================
# Options for compiling Rust code itself
# =============================================================================
/// Installs everything.
pub fn install(build: &Build, stage: u32, host: &str) {
let prefix_default = PathBuf::from("/usr/local");
+ let sysconfdir_default = PathBuf::from("/etc");
let docdir_default = PathBuf::from("share/doc/rust");
- let mandir_default = PathBuf::from("share/man");
+ let bindir_default = PathBuf::from("bin");
let libdir_default = PathBuf::from("lib");
+ let mandir_default = PathBuf::from("share/man");
let prefix = build.config.prefix.as_ref().unwrap_or(&prefix_default);
+ let sysconfdir = build.config.sysconfdir.as_ref().unwrap_or(&sysconfdir_default);
let docdir = build.config.docdir.as_ref().unwrap_or(&docdir_default);
+ let bindir = build.config.bindir.as_ref().unwrap_or(&bindir_default);
let libdir = build.config.libdir.as_ref().unwrap_or(&libdir_default);
let mandir = build.config.mandir.as_ref().unwrap_or(&mandir_default);
+ let sysconfdir = prefix.join(sysconfdir);
let docdir = prefix.join(docdir);
+ let bindir = prefix.join(bindir);
let libdir = prefix.join(libdir);
let mandir = prefix.join(mandir);
let destdir = env::var_os("DESTDIR").map(PathBuf::from);
let prefix = add_destdir(&prefix, &destdir);
+ let sysconfdir = add_destdir(&sysconfdir, &destdir);
let docdir = add_destdir(&docdir, &destdir);
+ let bindir = add_destdir(&bindir, &destdir);
let libdir = add_destdir(&libdir, &destdir);
let mandir = add_destdir(&mandir, &destdir);
t!(fs::create_dir_all(&empty_dir));
if build.config.docs {
install_sh(&build, "docs", "rust-docs", &build.rust_package_vers(),
- stage, host, &prefix, &docdir, &libdir, &mandir, &empty_dir);
+ stage, host, &prefix, &sysconfdir, &docdir, &bindir, &libdir,
+ &mandir, &empty_dir);
}
for target in build.config.target.iter() {
install_sh(&build, "std", "rust-std", &build.rust_package_vers(),
- stage, target, &prefix, &docdir, &libdir, &mandir, &empty_dir);
+ stage, target, &prefix, &sysconfdir, &docdir, &bindir, &libdir,
+ &mandir, &empty_dir);
}
if build.config.extended {
install_sh(&build, "cargo", "cargo", &build.cargo_package_vers(),
- stage, host, &prefix, &docdir, &libdir, &mandir, &empty_dir);
+ stage, host, &prefix, &sysconfdir, &docdir, &bindir, &libdir,
+ &mandir, &empty_dir);
install_sh(&build, "rls", "rls", &build.rls_package_vers(),
- stage, host, &prefix, &docdir, &libdir, &mandir, &empty_dir);
+ stage, host, &prefix, &sysconfdir, &docdir, &bindir, &libdir,
+ &mandir, &empty_dir);
}
install_sh(&build, "rustc", "rustc", &build.rust_package_vers(),
- stage, host, &prefix, &docdir, &libdir, &mandir, &empty_dir);
+ stage, host, &prefix, &sysconfdir, &docdir, &bindir, &libdir,
+ &mandir, &empty_dir);
t!(fs::remove_dir_all(&empty_dir));
}
fn install_sh(build: &Build, package: &str, name: &str, version: &str, stage: u32, host: &str,
- prefix: &Path, docdir: &Path, libdir: &Path, mandir: &Path, empty_dir: &Path) {
+ prefix: &Path, sysconfdir: &Path, docdir: &Path, bindir: &Path, libdir: &Path,
+ mandir: &Path, empty_dir: &Path) {
println!("Install {} stage{} ({})", package, stage, host);
let package_name = format!("{}-{}-{}", name, version, host);
cmd.current_dir(empty_dir)
.arg(sanitize_sh(&tmpdir(build).join(&package_name).join("install.sh")))
.arg(format!("--prefix={}", sanitize_sh(prefix)))
+ .arg(format!("--sysconfdir={}", sanitize_sh(sysconfdir)))
.arg(format!("--docdir={}", sanitize_sh(docdir)))
+ .arg(format!("--bindir={}", sanitize_sh(bindir)))
.arg(format!("--libdir={}", sanitize_sh(libdir)))
.arg(format!("--mandir={}", sanitize_sh(mandir)))
.arg("--disable-ldconfig");
use super::InferCtxt;
use super::{MiscVariable, TypeTrace};
+use hir::def_id::DefId;
use ty::{IntType, UintType};
use ty::{self, Ty, TyCtxt};
use ty::error::TypeError;
use ty::relate::{self, Relate, RelateResult, TypeRelation};
+use ty::subst::Substs;
use traits::{Obligation, PredicateObligations};
use syntax::ast;
Ok(ty::Binder(self.relate(a.skip_binder(), b.skip_binder())?))
}
+ fn relate_item_substs(&mut self,
+ item_def_id: DefId,
+ a_subst: &'tcx Substs<'tcx>,
+ b_subst: &'tcx Substs<'tcx>)
+ -> RelateResult<'tcx, &'tcx Substs<'tcx>>
+ {
+ if self.ambient_variance == ty::Variance::Invariant {
+ // Avoid fetching the variance if we are in an invariant
+ // context; no need, and it can induce dependency cycles
+ // (e.g. #41849).
+ relate::relate_substs(self, None, a_subst, b_subst)
+ } else {
+ let opt_variances = self.tcx().variances_of(item_def_id);
+ relate::relate_substs(self, Some(&opt_variances), a_subst, b_subst)
+ }
+ }
+
fn relate_with_variance<T: Relate<'tcx>>(&mut self,
variance: ty::Variance,
a: &T,
output: $output:tt) => {
define_map_struct! {
tcx: $tcx,
- ready: ([pub] $attrs $name),
+ ready: ([] $attrs $name),
input: ($($input)*),
output: $output
}
MirSource::Promoted(_, i) => write!(w, "{:?} in", i)?
}
- write!(w, " {}", tcx.node_path_str(src.item_id()))?;
+ item_path::with_forced_impl_filename_line(|| { // see notes on #41697 elsewhere
+ write!(w, " {}", tcx.node_path_str(src.item_id()))
+ })?;
if let MirSource::Fn(_) = src {
write!(w, "(")?;
let bound_list = unsatisfied_predicates.iter()
.map(|p| format!("`{} : {}`", p.self_ty(), p))
.collect::<Vec<_>>()
- .join(", ");
+ .join("\n");
err.note(&format!("the method `{}` exists but the following trait bounds \
- were not satisfied: {}",
+ were not satisfied:\n{}",
item_name,
bound_list));
}
return err_info;
}
+ // Here we are considering a case of converting
+ // `S<P0...Pn>` to `S<Q0...Qn>`. As an example, let's imagine a struct `Foo<T, U>`,
+ // which acts like a pointer to `U`, but carries along some extra data of type `T`:
+ //
+ // struct Foo<T, U> {
+ // extra: T,
+ // ptr: *mut U,
+ // }
+ //
+ // We might have an impl that allows (e.g.) `Foo<T, [i32; 3]>` to be unsized
+ // to `Foo<T, [i32]>`. That impl would look like:
+ //
+ // impl<T, U: Unsize<V>, V> CoerceUnsized<Foo<T, V>> for Foo<T, U> {}
+ //
+ // Here `U = [i32; 3]` and `V = [i32]`. At runtime,
+ // when this coercion occurs, we would be changing the
+ // field `ptr` from a thin pointer of type `*mut [i32;
+ // 3]` to a fat pointer of type `*mut [i32]` (with
+ // extra data `3`). **The purpose of this check is to
+ // make sure that we know how to do this conversion.**
+ //
+ // To check if this impl is legal, we would walk down
+ // the fields of `Foo` and consider their types with
+ // both substitutes. We are looking to find that
+ // exactly one (non-phantom) field has changed its
+ // type, which we will expect to be the pointer that
+ // is becoming fat (we could probably generalize this
+ // to multiple thin pointers of the same type becoming
+ // fat, but we don't). In this case:
+ //
+ // - `extra` has type `T` before and type `T` after
+ // - `ptr` has type `*mut U` before and type `*mut V` after
+ //
+ // Since just one field changed, we would then check
+ // that `*mut U: CoerceUnsized<*mut V>` is implemented
+ // (in other words, that we know how to do this
+ // conversion). This will work out because `U:
+ // Unsize<V>`, and we have a builtin rule that `*mut
+ // U` can be coerced to `*mut V` if `U: Unsize<V>`.
let fields = &def_a.struct_variant().fields;
let diff_fields = fields.iter()
.enumerate()
return None;
}
- // Ignore fields that aren't significantly changed
- if let Ok(ok) = infcx.sub_types(false, &cause, b, a) {
+ // Ignore fields that aren't changed; it may
+ // be that we could get away with subtyping or
+ // something more accepting, but we use
+ // equality because we want to be able to
+ // perform this check without computing
+ // variance where possible. (This is because
+ // we may have to evaluate constraint
+ // expressions in the course of execution.)
+ // See e.g. #41936.
+ if let Ok(ok) = infcx.eq_types(false, &cause, b, a) {
if ok.obligations.is_empty() {
return None;
}
let quot = if f.alternate() { "\"" } else { """ };
match self.0 {
Abi::Rust => Ok(()),
- Abi::C => write!(f, "extern "),
abi => write!(f, "extern {0}{1}{0} ", quot, abi.name()),
}
}
top: 0;
}
+h3 > .collapse-toggle, h4 > .collapse-toggle {
+ font-size: 0.8em;
+ top: 5px;
+}
+
.toggle-wrapper > .collapse-toggle {
left: -24px;
margin-top: 0px;
/// A thread local storage key which owns its contents.
///
/// This key uses the fastest possible implementation available to it for the
-/// target platform. It is instantiated with the `thread_local!` macro and the
-/// primary method is the `with` method.
+/// target platform. It is instantiated with the [`thread_local!`] macro and the
+/// primary method is the [`with`] method.
///
-/// The `with` method yields a reference to the contained value which cannot be
+/// The [`with`] method yields a reference to the contained value which cannot be
/// sent across threads or escape the given closure.
///
/// # Initialization and Destruction
///
-/// Initialization is dynamically performed on the first call to `with()`
-/// within a thread, and values that implement `Drop` get destructed when a
+/// Initialization is dynamically performed on the first call to [`with`]
+/// within a thread, and values that implement [`Drop`] get destructed when a
/// thread exits. Some caveats apply, which are explained below.
///
/// # Examples
/// 3. On macOS, initializing TLS during destruction of other TLS slots can
/// sometimes cancel *all* destructors for the current thread, whether or not
/// the slots have already had their destructors run or not.
+///
+/// [`with`]: ../../std/thread/struct.LocalKey.html#method.with
+/// [`thread_local!`]: ../../std/macro.thread_local.html
+/// [`Drop`]: ../../std/ops/trait.Drop.html
#[stable(feature = "rust1", since = "1.0.0")]
pub struct LocalKey<T: 'static> {
// This outer `LocalKey<T>` type is what's going to be stored in statics,
}
}
-/// Declare a new thread local storage key of type `std::thread::LocalKey`.
+/// Declare a new thread local storage key of type [`std::thread::LocalKey`].
///
/// # Syntax
///
/// # fn main() {}
/// ```
///
-/// See [LocalKey documentation](thread/struct.LocalKey.html) for more
+/// See [LocalKey documentation][`std::thread::LocalKey`] for more
/// information.
+///
+/// [`std::thread::LocalKey`]: ../std/thread/struct.LocalKey.html
#[macro_export]
#[stable(feature = "rust1", since = "1.0.0")]
#[allow_internal_unstable]
#[derive(Debug, Eq, PartialEq, Copy, Clone)]
pub enum LocalKeyState {
/// All keys are in this state whenever a thread starts. Keys will
- /// transition to the `Valid` state once the first call to `with` happens
+ /// transition to the `Valid` state once the first call to [`with`] happens
/// and the initialization expression succeeds.
///
/// Keys in the `Uninitialized` state will yield a reference to the closure
- /// passed to `with` so long as the initialization routine does not panic.
+ /// passed to [`with`] so long as the initialization routine does not panic.
+ ///
+ /// [`with`]: ../../std/thread/struct.LocalKey.html#method.with
Uninitialized,
/// Once a key has been accessed successfully, it will enter the `Valid`
/// `Destroyed` state.
///
/// Keys in the `Valid` state will be guaranteed to yield a reference to the
- /// closure passed to `with`.
+ /// closure passed to [`with`].
+ ///
+ /// [`with`]: ../../std/thread/struct.LocalKey.html#method.with
Valid,
/// When a thread exits, the destructors for keys will be run (if
/// destructor has run, a key is in the `Destroyed` state.
///
/// Keys in the `Destroyed` states will trigger a panic when accessed via
- /// `with`.
+ /// [`with`].
+ ///
+ /// [`with`]: ../../std/thread/struct.LocalKey.html#method.with
Destroyed,
}
/// Query the current state of this key.
///
/// A key is initially in the `Uninitialized` state whenever a thread
- /// starts. It will remain in this state up until the first call to `with`
+ /// starts. It will remain in this state up until the first call to [`with`]
/// within a thread has run the initialization expression successfully.
///
/// Once the initialization expression succeeds, the key transitions to the
- /// `Valid` state which will guarantee that future calls to `with` will
+ /// `Valid` state which will guarantee that future calls to [`with`] will
/// succeed within the thread.
///
/// When a thread exits, each key will be destroyed in turn, and as keys are
/// destroyed they will enter the `Destroyed` state just before the
/// destructor starts to run. Keys may remain in the `Destroyed` state after
/// destruction has completed. Keys without destructors (e.g. with types
- /// that are `Copy`), may never enter the `Destroyed` state.
+ /// that are [`Copy`]), may never enter the `Destroyed` state.
///
/// Keys in the `Uninitialized` state can be accessed so long as the
/// initialization does not panic. Keys in the `Valid` state are guaranteed
/// to be able to be accessed. Keys in the `Destroyed` state will panic on
- /// any call to `with`.
+ /// any call to [`with`].
+ ///
+ /// [`with`]: ../../std/thread/struct.LocalKey.html#method.with
+ /// [`Copy`]: ../../std/marker/trait.Copy.html
#[unstable(feature = "thread_local_state",
reason = "state querying was recently added",
issue = "27716")]
// Builder
////////////////////////////////////////////////////////////////////////////////
-/// Thread configuration. Provides detailed control over the properties
-/// and behavior of new threads.
+/// Thread factory, which can be used in order to configure the properties of
+/// a new thread.
+///
+/// Methods can be chained on it in order to configure it.
+///
+/// The two configurations available are:
+///
+/// - [`name`]: allows giving a name to the thread which is currently
+/// only used in `panic` messages.
+/// - [`stack_size`]: specifies the desired stack size. Note that this can
+/// be overridden by the OS.
+///
+/// If the [`stack_size`] field is not specified, the stack size
+/// will be the `RUST_MIN_STACK` environment variable. If it is
+/// not specified either, a sensible default will be set.
+///
+/// If the [`name`] field is not specified, the thread will not be named.
+///
+/// The [`spawn`] method will take ownership of the builder and create an
+/// [`io::Result`] to the thread handle with the given configuration.
+///
+/// The [`thread::spawn`] free function uses a `Builder` with default
+/// configuration and [`unwrap`]s its return value.
+///
+/// You may want to use [`spawn`] instead of [`thread::spawn`], when you want
+/// to recover from a failure to launch a thread, since the free function will
+/// panic where the `Builder` method will return an [`io::Result`].
///
/// # Examples
///
///
/// handler.join().unwrap();
/// ```
+///
+/// [`thread::spawn`]: ../../std/thread/fn.spawn.html
+/// [`stack_size`]: ../../std/thread/struct.Builder.html#method.stack_size
+/// [`name`]: ../../std/thread/struct.Builder.html#method.name
+/// [`spawn`]: ../../std/thread/struct.Builder.html#method.spawn
+/// [`io::Result`]: ../../std/io/type.Result.html
+/// [`unwrap`]: ../../std/result/enum.Result.html#method.unwrap
#[stable(feature = "rust1", since = "1.0.0")]
#[derive(Debug)]
pub struct Builder {
/// Generates the base configuration for spawning a thread, from which
/// configuration methods can be chained.
///
- /// If the [`stack_size`] field is not specified, the stack size
- /// will be the `RUST_MIN_STACK` environment variable. If it is
- /// not specified either, a sensible default will be set (2MB as
- /// of the writting of this doc).
- ///
/// # Examples
///
/// ```
///
/// handler.join().unwrap();
/// ```
- ///
- /// [`stack_size`]: ../../std/thread/struct.Builder.html#method.stack_size
#[stable(feature = "rust1", since = "1.0.0")]
pub fn new() -> Builder {
Builder {
self
}
- /// Spawns a new thread, and returns a join handle for it.
+ /// Spawns a new thread by taking ownership of the `Builder`, and returns an
+ /// [`io::Result`] to its [`JoinHandle`].
///
- /// The child thread may outlive the parent (unless the parent thread
+ /// The spawned thread may outlive the caller (unless the caller thread
/// is the main thread; the whole process is terminated when the main
/// thread finishes). The join handle can be used to block on
/// termination of the child thread, including recovering its panics.
///
/// [`spawn`]: ../../std/thread/fn.spawn.html
/// [`io::Result`]: ../../std/io/type.Result.html
+ /// [`JoinHandle`]: ../../std/thread/struct.JoinHandle.html
///
/// # Examples
///
/// Cooperatively gives up a timeslice to the OS scheduler.
///
+/// This is used when the programmer knows that the thread will have nothing
+/// to do for some time, and thus avoids wasting computing time.
+///
+/// For example when polling on a resource, it is common to check that it is
+/// available, and if not to yield in order to avoid busy waiting.
+///
+/// Thus the pattern of `yield`ing after a failed poll is rather common when
+/// implementing low-level shared resources or synchronization primitives.
+///
+/// However programmers will usually prefer to use [`channel`]s, [`Condvar`]s,
+/// [`Mutex`]es or [`join`] for their synchronization routines, as they avoid
+/// thinking about thread scheduling.
+///
+/// Note that [`channel`]s for example are implemented using this primitive.
+/// Indeed when you call `send` or `recv`, which are blocking, they will yield
+/// if the channel is not available.
+///
/// # Examples
///
/// ```
///
/// thread::yield_now();
/// ```
+///
+/// [`channel`]: ../../std/sync/mpsc/index.html
+/// [`spawn`]: ../../std/thread/fn.spawn.html
+/// [`join`]: ../../std/thread/struct.JoinHandle.html#method.join
+/// [`Mutex`]: ../../std/sync/struct.Mutex.html
+/// [`Condvar`]: ../../std/sync/struct.Condvar.html
#[stable(feature = "rust1", since = "1.0.0")]
pub fn yield_now() {
imp::Thread::yield_now()
StmtKind::Mac(mac) => StmtKind::Mac(mac.map(|(mac, _style, attrs)| {
(mac, MacStmtStyle::Semicolon, attrs)
})),
- node @ _ => node,
+ node => node,
};
self
}
pub fn is_unsuffixed(&self) -> bool {
match *self {
// unsuffixed variants
- LitKind::Str(..) => true,
- LitKind::ByteStr(..) => true,
- LitKind::Byte(..) => true,
- LitKind::Char(..) => true,
- LitKind::Int(_, LitIntType::Unsuffixed) => true,
- LitKind::FloatUnsuffixed(..) => true,
+ LitKind::Str(..) |
+ LitKind::ByteStr(..) |
+ LitKind::Byte(..) |
+ LitKind::Char(..) |
+ LitKind::Int(_, LitIntType::Unsuffixed) |
+ LitKind::FloatUnsuffixed(..) |
LitKind::Bool(..) => true,
// suffixed variants
- LitKind::Int(_, LitIntType::Signed(..)) => false,
- LitKind::Int(_, LitIntType::Unsigned(..)) => false,
+ LitKind::Int(_, LitIntType::Signed(..)) |
+ LitKind::Int(_, LitIntType::Unsigned(..)) |
LitKind::Float(..) => false,
}
}
/// Returns the MetaItem if self is a NestedMetaItemKind::MetaItem.
pub fn meta_item(&self) -> Option<&MetaItem> {
match self.node {
- NestedMetaItemKind::MetaItem(ref item) => Some(&item),
+ NestedMetaItemKind::MetaItem(ref item) => Some(item),
_ => None
}
}
/// Returns the Lit if self is a NestedMetaItemKind::Literal.
pub fn literal(&self) -> Option<&Lit> {
match self.node {
- NestedMetaItemKind::Literal(ref lit) => Some(&lit),
+ NestedMetaItemKind::Literal(ref lit) => Some(lit),
_ => None
}
}
match self.node {
MetaItemKind::NameValue(ref v) => {
match v.node {
- LitKind::Str(ref s, _) => Some((*s).clone()),
+ LitKind::Str(ref s, _) => Some(*s),
_ => None,
}
},
Token::Literal(token::Lit::Float(symbol), Some(Symbol::intern(ty.ty_to_string())))
}
LitKind::FloatUnsuffixed(symbol) => Token::Literal(token::Lit::Float(symbol), None),
- LitKind::Bool(value) => Token::Ident(Ident::with_empty_ctxt(Symbol::intern(match value {
- true => "true",
- false => "false",
+ LitKind::Bool(value) => Token::Ident(Ident::with_empty_ctxt(Symbol::intern(if value {
+ "true"
+ } else {
+ "false"
}))),
}
}
impl HasAttrs for Vec<Attribute> {
fn attrs(&self) -> &[Attribute] {
- &self
+ self
}
fn map_attrs<F: FnOnce(Vec<Attribute>) -> Vec<Attribute>>(self, f: F) -> Self {
f(self)
impl HasAttrs for ThinVec<Attribute> {
fn attrs(&self) -> &[Attribute] {
- &self
+ self
}
fn map_attrs<F: FnOnce(Vec<Attribute>) -> Vec<Attribute>>(self, f: F) -> Self {
f(self.into()).into()
match self.span_to_snippet(sp) {
Ok(snippet) => {
let snippet = snippet.split(c).nth(0).unwrap_or("").trim_right();
- if snippet.len() > 0 && !snippet.contains('\n') {
+ if !snippet.is_empty() && !snippet.contains('\n') {
Span { hi: BytePos(sp.lo.0 + snippet.len() as u32), ..sp }
} else {
sp
pub fn get_filemap(&self, filename: &str) -> Option<Rc<FileMap>> {
for fm in self.files.borrow().iter() {
if filename == fm.name {
- (self.dep_tracking_callback.borrow())(&fm);
+ (self.dep_tracking_callback.borrow())(fm);
return Some(fm.clone());
}
}
return false;
}
- let mis = if !is_cfg(&attr) {
+ let mis = if !is_cfg(attr) {
return true;
} else if let Some(mis) = attr.meta_item_list() {
mis
// flag the offending attributes
for attr in attrs.iter() {
if !self.features.map(|features| features.stmt_expr_attributes).unwrap_or(true) {
- let mut err = feature_err(&self.sess,
+ let mut err = feature_err(self.sess,
"stmt_expr_attributes",
attr.span,
GateIssue::Language,
pub fn configure_struct_expr_field(&mut self, field: ast::Field) -> Option<ast::Field> {
if !self.features.map(|features| features.struct_field_attributes).unwrap_or(true) {
if !field.attrs.is_empty() {
- let mut err = feature_err(&self.sess,
+ let mut err = feature_err(self.sess,
"struct_field_attributes",
field.span,
GateIssue::Language,
for attr in attrs.iter() {
if !self.features.map(|features| features.struct_field_attributes).unwrap_or(true) {
let mut err = feature_err(
- &self.sess,
+ self.sess,
"struct_field_attributes",
attr.span,
GateIssue::Language,
// URLs can be unavoidably longer than the line limit, so we allow them.
// Allowed format is: `[name]: https://www.rust-lang.org/`
- let is_url = |l: &str| l.starts_with('[') && l.contains("]:") && l.contains("http");
+ let is_url = |l: &str| l.starts_with("[") && l.contains("]:") && l.contains("http");
if msg.lines().any(|line| line.len() > MAX_DESCRIPTION_WIDTH && !is_url(line)) {
ecx.span_err(span, &format!(
if let Err(e) = output_metadata(ecx,
&target_triple,
&crate_name.name.as_str(),
- &diagnostics) {
+ diagnostics) {
ecx.span_bug(span, &format!(
"error writing metadata for triple `{}` and crate `{}`, error: {}, \
cause: {:?}",
MacEager::items(SmallVector::many(vec![
P(ast::Item {
- ident: name.clone(),
+ ident: *name,
attrs: Vec::new(),
id: ast::DUMMY_NODE_ID,
node: ast::ItemKind::Const(
}
/// One of these is made during expansion and incrementally updated as we go;
-/// when a macro expansion occurs, the resulting nodes have the backtrace()
-/// -> expn_info of their expansion context stored into their span.
+/// when a macro expansion occurs, the resulting nodes have the `backtrace()
+/// -> expn_info` of their expansion context stored into their span.
pub struct ExtCtxt<'a> {
pub parse_sess: &'a parse::ParseSess,
pub ecfg: expand::ExpansionConfig<'a>,
}
ctxt = info.call_site.ctxt;
last_macro = Some(info.call_site);
- return Some(());
+ Some(())
}).is_none() {
break
}
}
pub fn trace_macros_diag(&self) {
for (sp, notes) in self.expansions.iter() {
- let mut db = self.parse_sess.span_diagnostic.span_note_diag(*sp, &"trace_macro");
+ let mut db = self.parse_sess.span_diagnostic.span_note_diag(*sp, "trace_macro");
for note in notes {
- db.note(¬e);
+ db.note(note);
}
db.emit();
}
v.push(self.ident_of(s));
}
v.extend(components.iter().map(|s| self.ident_of(s)));
- return v
+ v
}
pub fn name_of(&self, st: &str) -> ast::Name {
Symbol::intern(st)
match *ext {
MultiModifier(ref mac) => {
- let meta = panictry!(attr.parse_meta(&self.cx.parse_sess));
+ let meta = panictry!(attr.parse_meta(self.cx.parse_sess));
let item = mac.expand(self.cx, attr.span, &meta, item);
kind.expect_from_annotatables(item)
}
MultiDecorator(ref mac) => {
let mut items = Vec::new();
- let meta = panictry!(attr.parse_meta(&self.cx.parse_sess));
+ let meta = panictry!(attr.parse_meta(self.cx.parse_sess));
mac.expand(self.cx, attr.span, &meta, &item, &mut |item| items.push(item));
items.push(item);
kind.expect_from_annotatables(items)
}
SyntaxExtension::AttrProcMacro(ref mac) => {
- let item_toks = stream_for_item(&item, &self.cx.parse_sess);
+ let item_toks = stream_for_item(&item, self.cx.parse_sess);
let span = Span { ctxt: self.cx.backtrace(), ..attr.span };
let tok_result = mac.expand(self.cx, attr.span, attr.tokens, item_toks);
}
_ => {
let msg = &format!("macro `{}` may not be used in attributes", attr.path);
- self.cx.span_err(attr.span, &msg);
+ self.cx.span_err(attr.span, msg);
kind.dummy(attr.span)
}
}
};
let path = &mac.node.path;
- let ident = ident.unwrap_or(keywords::Invalid.ident());
+ let ident = ident.unwrap_or_else(|| keywords::Invalid.ident());
let marked_tts = noop_fold_tts(mac.node.stream(), &mut Marker(mark));
let opt_expanded = match *ext {
NormalTT(ref expandfun, exp_span, allow_internal_unstable) => {
}
_ => {
let msg = &format!("macro `{}` may not be used for derive attributes", attr.path);
- self.cx.span_err(span, &msg);
+ self.cx.span_err(span, msg);
kind.dummy(span)
}
}
fn check_attributes(&mut self, attrs: &[ast::Attribute]) {
let features = self.cx.ecfg.features.unwrap();
for attr in attrs.iter() {
- feature_gate::check_attribute(&attr, &self.cx.parse_sess, features);
+ feature_gate::check_attribute(attr, self.cx.parse_sess, features);
}
}
}
pub fn find_attr_invoc(attrs: &mut Vec<ast::Attribute>) -> Option<ast::Attribute> {
- for i in 0 .. attrs.len() {
- if !attr::is_known(&attrs[i]) && !is_builtin_attr(&attrs[i]) {
- return Some(attrs.remove(i));
- }
- }
-
- None
+ attrs.iter()
+ .position(|a| !attr::is_known(a) && !is_builtin_attr(a))
+ .map(|i| attrs.remove(i))
}
// These are pretty nasty. Ideally, we would keep the tokens around, linked from
let result = noop_fold_item(item, self);
self.cx.current_expansion.module = orig_module;
self.cx.current_expansion.directory_ownership = orig_directory_ownership;
- return result;
+ result
}
// Ensure that test functions are accessible from the test harness.
ast::ItemKind::Fn(..) if self.cx.ecfg.should_test => {
///
/// This is registered as a set of expression syntax extension called quote!
/// that lifts its argument token-tree to an AST representing the
-/// construction of the same token tree, with token::SubstNt interpreted
+/// construction of the same token tree, with `token::SubstNt` interpreted
/// as antiquotes (splices).
pub mod rt {
result = results.pop().unwrap();
result.push(tree);
}
- tree @ _ => result.push(tree),
+ tree => result.push(tree),
}
}
result
cx.span_err(sp,
&format!("{} wasn't a utf-8 file",
file.display()));
- return DummyResult::expr(sp);
+ DummyResult::expr(sp)
}
}
}
Err(e) => {
cx.span_err(sp,
&format!("couldn't read {}: {}", file.display(), e));
- return DummyResult::expr(sp);
+ DummyResult::expr(sp)
}
Ok(..) => {
// Add this input file to the code map to make it available as
//! repetitions indicated by Kleene stars. It only advances or calls out to the
//! real Rust parser when no `cur_eis` items remain
//!
-//! Example: Start parsing `a a a a b` against [· a $( a )* a b].
+//! Example:
//!
-//! Remaining input: `a a a a b`
+//! ```text, ignore
+//! Start parsing a a a a b against [· a $( a )* a b].
+//!
+//! Remaining input: a a a a b
//! next_eis: [· a $( a )* a b]
//!
-//! - - - Advance over an `a`. - - -
+//! - - - Advance over an a. - - -
//!
-//! Remaining input: `a a a b`
+//! Remaining input: a a a b
//! cur: [a · $( a )* a b]
//! Descend/Skip (first item).
//! next: [a $( · a )* a b] [a $( a )* · a b].
//!
-//! - - - Advance over an `a`. - - -
+//! - - - Advance over an a. - - -
//!
-//! Remaining input: `a a b`
+//! Remaining input: a a b
//! cur: [a $( a · )* a b] next: [a $( a )* a · b]
//! Finish/Repeat (first item)
//! next: [a $( a )* · a b] [a $( · a )* a b] [a $( a )* a · b]
//!
-//! - - - Advance over an `a`. - - - (this looks exactly like the last step)
+//! - - - Advance over an a. - - - (this looks exactly like the last step)
//!
-//! Remaining input: `a b`
+//! Remaining input: a b
//! cur: [a $( a · )* a b] next: [a $( a )* a · b]
//! Finish/Repeat (first item)
//! next: [a $( a )* · a b] [a $( · a )* a b] [a $( a )* a · b]
//!
-//! - - - Advance over an `a`. - - - (this looks exactly like the last step)
+//! - - - Advance over an a. - - - (this looks exactly like the last step)
//!
-//! Remaining input: `b`
+//! Remaining input: b
//! cur: [a $( a · )* a b] next: [a $( a )* a · b]
//! Finish/Repeat (first item)
//! next: [a $( a )* · a b] [a $( · a )* a b]
//!
-//! - - - Advance over a `b`. - - -
+//! - - - Advance over a b. - - -
//!
-//! Remaining input: ``
+//! Remaining input: ''
//! eof: [a $( a )* a b ·]
+//! ```
pub use self::NamedMatch::*;
pub use self::ParseResult::*;
})
}
-/// NamedMatch is a pattern-match result for a single token::MATCH_NONTERMINAL:
+/// `NamedMatch` is a pattern-match result for a single `token::MATCH_NONTERMINAL`:
/// so it is associated with a single ident in a parse, and all
-/// `MatchedNonterminal`s in the NamedMatch have the same nonterminal type
-/// (expr, item, etc). Each leaf in a single NamedMatch corresponds to a
-/// single token::MATCH_NONTERMINAL in the TokenTree that produced it.
+/// `MatchedNonterminal`s in the `NamedMatch` have the same nonterminal type
+/// (expr, item, etc). Each leaf in a single `NamedMatch` corresponds to a
+/// single `token::MATCH_NONTERMINAL` in the `TokenTree` that produced it.
///
-/// The in-memory structure of a particular NamedMatch represents the match
+/// The in-memory structure of a particular `NamedMatch` represents the match
/// that occurred when a particular subset of a matcher was applied to a
/// particular token tree.
///
-/// The width of each MatchedSeq in the NamedMatch, and the identity of the
-/// `MatchedNonterminal`s, will depend on the token tree it was applied to:
-/// each MatchedSeq corresponds to a single TTSeq in the originating
-/// token tree. The depth of the NamedMatch structure will therefore depend
+/// The width of each `MatchedSeq` in the `NamedMatch`, and the identity of
+/// the `MatchedNonterminal`s, will depend on the token tree it was applied
+/// to: each `MatchedSeq` corresponds to a single `TTSeq` in the originating
+/// token tree. The depth of the `NamedMatch` structure will therefore depend
/// only on the nesting depth of `ast::TTSeq`s in the originating
/// token tree it was derived from.
/// Perform a token equality check, ignoring syntax context (that is, an unhygienic comparison)
fn token_name_eq(t1 : &Token, t2 : &Token) -> bool {
- match (t1,t2) {
- (&token::Ident(id1),&token::Ident(id2))
- | (&token::Lifetime(id1),&token::Lifetime(id2)) =>
- id1.name == id2.name,
- _ => *t1 == *t2
+ if let (Some(id1), Some(id2)) = (t1.ident(), t2.ident()) {
+ id1.name == id2.name
+ } else if let (&token::Lifetime(id1), &token::Lifetime(id2)) = (t1, t2) {
+ id1.name == id2.name
+ } else {
+ *t1 == *t2
}
}
// Check if we need a separator
if idx == len && ei.sep.is_some() {
// We have a separator, and it is the current token.
- if ei.sep.as_ref().map(|ref sep| token_name_eq(&token, sep)).unwrap_or(false) {
+ if ei.sep.as_ref().map(|sep| token_name_eq(token, sep)).unwrap_or(false) {
ei.idx += 1;
next_eis.push(ei);
}
cur_eis.push(ei);
}
TokenTree::Token(_, ref t) => {
- if token_name_eq(t, &token) {
+ if token_name_eq(t, token) {
ei.idx += 1;
next_eis.push(ei);
}
}
fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal {
- match name {
- "tt" => {
- return token::NtTT(p.parse_token_tree());
- }
- _ => {}
+ if name == "tt" {
+ return token::NtTT(p.parse_token_tree());
}
// check at the beginning and the parser checks after each bump
p.process_potential_macro_variable();
-> Box<MacResult+'cx> {
if cx.trace_macros() {
let sp = sp.macro_backtrace().last().map(|trace| trace.call_site).unwrap_or(sp);
- let mut values: &mut Vec<String> = cx.expansions.entry(sp).or_insert(vec![]);
+ let values: &mut Vec<String> = cx.expansions.entry(sp).or_insert_with(Vec::new);
values.push(format!("expands to `{}! {{ {} }}`", name, arg));
}
let mut valid = true;
// Extract the arguments:
- let lhses = match **argument_map.get(&lhs_nm).unwrap() {
+ let lhses = match *argument_map[&lhs_nm] {
MatchedSeq(ref s, _) => {
s.iter().map(|m| {
if let MatchedNonterminal(ref nt) = **m {
_ => sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs")
};
- let rhses = match **argument_map.get(&rhs_nm).unwrap() {
+ let rhses = match *argument_map[&rhs_nm] {
MatchedSeq(ref s, _) => {
s.iter().map(|m| {
if let MatchedNonterminal(ref nt) = **m {
lhs: "ed::TokenTree) -> bool {
// lhs is going to be like TokenTree::Delimited(...), where the
// entire lhs is those tts. Or, it can be a "bare sequence", not wrapped in parens.
- match lhs {
- "ed::TokenTree::Delimited(_, ref tts) => check_matcher(sess, features, &tts.tts),
- _ => {
- let msg = "invalid macro matcher; matchers must be contained in balanced delimiters";
- sess.span_diagnostic.span_err(lhs.span(), msg);
- false
- }
+ if let quoted::TokenTree::Delimited(_, ref tts) = *lhs {
+ check_matcher(sess, features, &tts.tts)
+ } else {
+ let msg = "invalid macro matcher; matchers must be contained in balanced delimiters";
+ sess.span_diagnostic.span_err(lhs.span(), msg);
+ false
}
// we don't abort on errors on rejection, the driver will do that for us
// after parsing/expansion. we can report every error in every macro this way.
return false;
},
TokenTree::Sequence(span, ref seq) => {
- if seq.separator.is_none() {
- if seq.tts.iter().all(|seq_tt| {
- match *seq_tt {
- TokenTree::Sequence(_, ref sub_seq) =>
- sub_seq.op == quoted::KleeneOp::ZeroOrMore,
- _ => false,
- }
- }) {
- sess.span_diagnostic.span_err(span, "repetition matches empty token tree");
- return false;
+ if seq.separator.is_none() && seq.tts.iter().all(|seq_tt| {
+ match *seq_tt {
+ TokenTree::Sequence(_, ref sub_seq) =>
+ sub_seq.op == quoted::KleeneOp::ZeroOrMore,
+ _ => false,
}
+ }) {
+ sess.span_diagnostic.span_err(span, "repetition matches empty token tree");
+ return false;
}
if !check_lhs_no_empty_seq(sess, &seq.tts) {
return false;
}
}
- return first;
+ first
}
}
// we only exit the loop if `tts` was empty or if every
// element of `tts` matches the empty sequence.
assert!(first.maybe_empty);
- return first;
+ first
}
}
let build_suffix_first = || {
let mut s = first_sets.first(suffix);
if s.maybe_empty { s.add_all(follow); }
- return s;
+ s
};
// (we build `suffix_first` on demand below; you can tell
match *tt {
quoted::TokenTree::Token(_, ref tok) => ::print::pprust::token_to_string(tok),
quoted::TokenTree::MetaVarDecl(_, name, kind) => format!("${}:{}", name, kind),
- _ => panic!("unexpected quoted::TokenTree::{Sequence or Delimited} in follow set checker"),
+ _ => panic!("unexpected quoted::TokenTree::{{Sequence or Delimited}} \
+ in follow set checker"),
}
}
}
}
+ pub fn is_empty(&self) -> bool {
+ match *self {
+ TokenTree::Delimited(_, ref delimed) => match delimed.delim {
+ token::NoDelim => delimed.tts.is_empty(),
+ _ => false,
+ },
+ TokenTree::Sequence(_, ref seq) => seq.tts.is_empty(),
+ _ => true,
+ }
+ }
+
pub fn get_tt(&self, index: usize) -> TokenTree {
match (self, index) {
(&TokenTree::Delimited(_, ref delimed), _) if delimed.delim == token::NoDelim => {
}
_ => end_sp,
},
- tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span),
+ tree => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span),
},
- tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(start_sp),
+ tree => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(start_sp),
};
sess.missing_fragment_specifiers.borrow_mut().insert(span);
result.push(TokenTree::MetaVarDecl(span, ident, keywords::Invalid.ident()));
Some(op) => return (Some(tok), op),
None => span,
},
- tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span),
+ tree => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span),
}
},
- tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span),
+ tree => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span),
};
sess.span_diagnostic.span_err(span, "expected `*` or `+`");
&repeats) {
LockstepIterSize::Unconstrained => {
panic!(sp_diag.span_fatal(
- sp.clone(), /* blame macro writer */
+ sp, /* blame macro writer */
"attempted to repeat an expression \
containing no syntax \
variables matched as repeating at this depth"));
}
LockstepIterSize::Contradiction(ref msg) => {
// FIXME #2887 blame macro invoker instead
- panic!(sp_diag.span_fatal(sp.clone(), &msg[..]));
+ panic!(sp_diag.span_fatal(sp, &msg[..]));
}
LockstepIterSize::Constraint(len, _) => {
if len == 0 {
if seq.op == quoted::KleeneOp::OneOrMore {
// FIXME #2887 blame invoker
- panic!(sp_diag.span_fatal(sp.clone(),
+ panic!(sp_diag.span_fatal(sp,
"this must repeat at least once"));
}
} else {
impl ::std::fmt::Debug for AttributeGate {
fn fmt(&self, fmt: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
match *self {
- Gated(ref stab, ref name, ref expl, _) =>
+ Gated(ref stab, name, expl, _) =>
write!(fmt, "Gated({:?}, {}, {})", stab, name, expl),
Ungated => write!(fmt, "Ungated")
}
];
// cfg(...)'s that are feature gated
-const GATED_CFGS: &'static [(&'static str, &'static str, fn(&Features) -> bool)] = &[
+const GATED_CFGS: &[(&str, &str, fn(&Features) -> bool)] = &[
// (name in cfg, feature, function to check if the feature is enabled)
("target_feature", "cfg_target_feature", cfg_fn!(cfg_target_feature)),
("target_vendor", "cfg_target_vendor", cfg_fn!(cfg_target_vendor)),
let name = unwrap_or!(attr.name(), return).as_str();
for &(n, ty, ref gateage) in BUILTIN_ATTRIBUTES {
if name == n {
- if let &Gated(_, ref name, ref desc, ref has_feature) = gateage {
+ if let Gated(_, name, desc, ref has_feature) = *gateage {
gate_feature_fn!(self, has_feature, attr.span, name, desc);
}
debug!("check_attribute: {:?} is builtin, {:?}, {:?}", attr.path, ty, gateage);
NameValue(ref lit) => !lit.node.is_str(),
List(ref list) => list.iter().any(|li| {
match li.node {
- MetaItem(ref mi) => contains_novel_literal(&mi),
+ MetaItem(ref mi) => contains_novel_literal(mi),
Literal(_) => true,
}
}),
return
}
- let meta = panictry!(attr.parse_meta(&self.context.parse_sess));
+ let meta = panictry!(attr.parse_meta(self.context.parse_sess));
if contains_novel_literal(&meta) {
gate_feature_post!(&self, attr_literals, attr.span,
"non-string literals in attributes, or string \
}
ast::ItemKind::Impl(_, polarity, defaultness, _, _, _, _) => {
- match polarity {
- ast::ImplPolarity::Negative => {
- gate_feature_post!(&self, optin_builtin_traits,
- i.span,
- "negative trait bounds are not yet fully implemented; \
- use marker types for now");
- },
- _ => {}
+ if polarity == ast::ImplPolarity::Negative {
+ gate_feature_post!(&self, optin_builtin_traits,
+ i.span,
+ "negative trait bounds are not yet fully implemented; \
+ use marker types for now");
}
if let ast::Defaultness::Default = defaultness {
fn visit_fn_ret_ty(&mut self, ret_ty: &'a ast::FunctionRetTy) {
if let ast::FunctionRetTy::Ty(ref output_ty) = *ret_ty {
- match output_ty.node {
- ast::TyKind::Never => return,
- _ => (),
- };
- self.visit_ty(output_ty)
+ if output_ty.node != ast::TyKind::Never {
+ self.visit_ty(output_ty)
+ }
}
}
span: Span,
_node_id: NodeId) {
// check for const fn declarations
- match fn_kind {
- FnKind::ItemFn(_, _, _, Spanned { node: ast::Constness::Const, .. }, _, _, _) => {
- gate_feature_post!(&self, const_fn, span, "const fn is unstable");
- }
- _ => {
- // stability of const fn methods are covered in
- // visit_trait_item and visit_impl_item below; this is
- // because default methods don't pass through this
- // point.
- }
+ if let FnKind::ItemFn(_, _, _, Spanned { node: ast::Constness::Const, .. }, _, _, _) =
+ fn_kind {
+ gate_feature_post!(&self, const_fn, span, "const fn is unstable");
}
+ // stability of const fn methods is covered in
+ // visit_trait_item and visit_impl_item below; this is
+ // because default methods don't pass through this
+ // point.
match fn_kind {
FnKind::ItemFn(_, _, _, _, abi, _, _) |
})
.collect()
})
- .unwrap_or(vec![])
+ .unwrap_or_else(|_| vec![])
}
}
_ => break,
}
}
- return Ok(attrs);
+ Ok(attrs)
}
/// Matches `attribute = # ! [ meta_item ]`
}
let attr = self.parse_attribute(true)?;
- assert!(attr.style == ast::AttrStyle::Inner);
+ assert_eq!(attr.style, ast::AttrStyle::Inner);
attrs.push(attr);
}
token::DocComment(s) => {
}
/// this statement requires a semicolon after it.
-/// note that in one case (stmt_semi), we've already
+/// note that in one case (`stmt_semi`), we've already
/// seen the semicolon, and thus don't need another.
pub fn stmt_ends_with_semi(stmt: &ast::StmtKind) -> bool {
match *stmt {
ast::StmtKind::Local(_) => true,
- ast::StmtKind::Item(_) => false,
ast::StmtKind::Expr(ref e) => expr_requires_semi_to_be_stmt(e),
- ast::StmtKind::Semi(..) => false,
+ ast::StmtKind::Item(_) |
+ ast::StmtKind::Semi(..) |
ast::StmtKind::Mac(..) => false,
}
}
use parse::token;
-/// SeqSep : a sequence separator (token)
+/// `SeqSep` : a sequence separator (token)
/// and whether a trailing separator is allowed.
pub struct SeqSep {
pub sep: Option<token::Token>,
while j > i && lines[j - 1].trim().is_empty() {
j -= 1;
}
- lines[i..j].iter().cloned().collect()
+ lines[i..j].to_vec()
}
/// remove a "[ \t]*\*" block from each line, if possible
impl<'a> StringReader<'a> {
/// For comments.rs, which hackily pokes into next_pos and ch
- pub fn new_raw<'b>(sess: &'a ParseSess, filemap: Rc<syntax_pos::FileMap>) -> Self {
+ pub fn new_raw(sess: &'a ParseSess, filemap: Rc<syntax_pos::FileMap>) -> Self {
let mut sr = StringReader::new_raw_internal(sess, filemap);
sr.bump();
sr
pub fn new(sess: &'a ParseSess, filemap: Rc<syntax_pos::FileMap>) -> Self {
let mut sr = StringReader::new_raw(sess, filemap);
- if let Err(_) = sr.advance_token() {
+ if sr.advance_token().is_err() {
sr.emit_fatal_errors();
panic!(FatalError);
}
sr.bump();
- if let Err(_) = sr.advance_token() {
+ if sr.advance_token().is_err() {
sr.emit_fatal_errors();
panic!(FatalError);
}
self.bump();
}
- return if doc_comment {
+ if doc_comment {
self.with_str_from(start_bpos, |string| {
// comments with only more "/"s are not doc comments
let tok = if is_doc_comment(string) {
tok: token::Comment,
sp: mk_sp(start_bpos, self.pos),
})
- };
+ }
}
Some('*') => {
self.bump();
}
let pos = self.pos;
self.check_float_base(start_bpos, pos, base);
- return token::Float(self.name_from(start_bpos));
+ token::Float(self.name_from(start_bpos))
} else {
// it might be a float if it has an exponent
if self.ch_is('e') || self.ch_is('E') {
return token::Float(self.name_from(start_bpos));
}
// but we certainly have an integer!
- return token::Integer(self.name_from(start_bpos));
+ token::Integer(self.name_from(start_bpos))
}
}
self.bump();
if self.ch_is('=') {
self.bump();
- return token::BinOpEq(op);
+ token::BinOpEq(op)
} else {
- return token::BinOp(op);
+ token::BinOp(op)
}
}
// One-byte tokens.
';' => {
self.bump();
- return Ok(token::Semi);
+ Ok(token::Semi)
}
',' => {
self.bump();
- return Ok(token::Comma);
+ Ok(token::Comma)
}
'.' => {
self.bump();
- return if self.ch_is('.') {
+ if self.ch_is('.') {
self.bump();
if self.ch_is('.') {
self.bump();
}
} else {
Ok(token::Dot)
- };
+ }
}
'(' => {
self.bump();
- return Ok(token::OpenDelim(token::Paren));
+ Ok(token::OpenDelim(token::Paren))
}
')' => {
self.bump();
- return Ok(token::CloseDelim(token::Paren));
+ Ok(token::CloseDelim(token::Paren))
}
'{' => {
self.bump();
- return Ok(token::OpenDelim(token::Brace));
+ Ok(token::OpenDelim(token::Brace))
}
'}' => {
self.bump();
- return Ok(token::CloseDelim(token::Brace));
+ Ok(token::CloseDelim(token::Brace))
}
'[' => {
self.bump();
- return Ok(token::OpenDelim(token::Bracket));
+ Ok(token::OpenDelim(token::Bracket))
}
']' => {
self.bump();
- return Ok(token::CloseDelim(token::Bracket));
+ Ok(token::CloseDelim(token::Bracket))
}
'@' => {
self.bump();
- return Ok(token::At);
+ Ok(token::At)
}
'#' => {
self.bump();
- return Ok(token::Pound);
+ Ok(token::Pound)
}
'~' => {
self.bump();
- return Ok(token::Tilde);
+ Ok(token::Tilde)
}
'?' => {
self.bump();
- return Ok(token::Question);
+ Ok(token::Question)
}
':' => {
self.bump();
if self.ch_is(':') {
self.bump();
- return Ok(token::ModSep);
+ Ok(token::ModSep)
} else {
- return Ok(token::Colon);
+ Ok(token::Colon)
}
}
'$' => {
self.bump();
- return Ok(token::Dollar);
+ Ok(token::Dollar)
}
// Multi-byte tokens.
self.bump();
if self.ch_is('=') {
self.bump();
- return Ok(token::EqEq);
+ Ok(token::EqEq)
} else if self.ch_is('>') {
self.bump();
- return Ok(token::FatArrow);
+ Ok(token::FatArrow)
} else {
- return Ok(token::Eq);
+ Ok(token::Eq)
}
}
'!' => {
self.bump();
if self.ch_is('=') {
self.bump();
- return Ok(token::Ne);
+ Ok(token::Ne)
} else {
- return Ok(token::Not);
+ Ok(token::Not)
}
}
'<' => {
match self.ch.unwrap_or('\x00') {
'=' => {
self.bump();
- return Ok(token::Le);
+ Ok(token::Le)
}
'<' => {
- return Ok(self.binop(token::Shl));
+ Ok(self.binop(token::Shl))
}
'-' => {
self.bump();
match self.ch.unwrap_or('\x00') {
_ => {
- return Ok(token::LArrow);
+ Ok(token::LArrow)
}
}
}
_ => {
- return Ok(token::Lt);
+ Ok(token::Lt)
}
}
}
match self.ch.unwrap_or('\x00') {
'=' => {
self.bump();
- return Ok(token::Ge);
+ Ok(token::Ge)
}
'>' => {
- return Ok(self.binop(token::Shr));
+ Ok(self.binop(token::Shr))
}
_ => {
- return Ok(token::Gt);
+ Ok(token::Gt)
}
}
}
};
self.bump(); // advance ch past token
let suffix = self.scan_optional_raw_name();
- return Ok(token::Literal(token::Char(id), suffix));
+ Ok(token::Literal(token::Char(id), suffix))
}
'b' => {
self.bump();
_ => unreachable!(), // Should have been a token::Ident above.
};
let suffix = self.scan_optional_raw_name();
- return Ok(token::Literal(lit, suffix));
+ Ok(token::Literal(lit, suffix))
}
'"' => {
let start_bpos = self.pos;
};
self.bump();
let suffix = self.scan_optional_raw_name();
- return Ok(token::Literal(token::Str_(id), suffix));
+ Ok(token::Literal(token::Str_(id), suffix))
}
'r' => {
let start_bpos = self.pos;
Symbol::intern("??")
};
let suffix = self.scan_optional_raw_name();
- return Ok(token::Literal(token::StrRaw(id, hash_count), suffix));
+ Ok(token::Literal(token::StrRaw(id, hash_count), suffix))
}
'-' => {
if self.nextch_is('>') {
self.bump();
self.bump();
- return Ok(token::RArrow);
+ Ok(token::RArrow)
} else {
- return Ok(self.binop(token::Minus));
+ Ok(self.binop(token::Minus))
}
}
'&' => {
if self.nextch_is('&') {
self.bump();
self.bump();
- return Ok(token::AndAnd);
+ Ok(token::AndAnd)
} else {
- return Ok(self.binop(token::And));
+ Ok(self.binop(token::And))
}
}
'|' => {
Some('|') => {
self.bump();
self.bump();
- return Ok(token::OrOr);
+ Ok(token::OrOr)
}
_ => {
- return Ok(self.binop(token::Or));
+ Ok(self.binop(token::Or))
}
}
}
'+' => {
- return Ok(self.binop(token::Plus));
+ Ok(self.binop(token::Plus))
}
'*' => {
- return Ok(self.binop(token::Star));
+ Ok(self.binop(token::Star))
}
'/' => {
- return Ok(self.binop(token::Slash));
+ Ok(self.binop(token::Slash))
}
'^' => {
- return Ok(self.binop(token::Caret));
+ Ok(self.binop(token::Caret))
}
'%' => {
- return Ok(self.binop(token::Percent));
+ Ok(self.binop(token::Percent))
}
c => {
let last_bpos = self.pos;
bpos,
"unknown start of token",
c);
- unicode_chars::check_for_substitution(&self, c, &mut err);
+ unicode_chars::check_for_substitution(self, c, &mut err);
self.fatal_errs.push(err);
Err(())
}
if self.ch_is('\n') {
self.bump();
}
- return val;
+ val
}
fn read_one_line_comment(&mut self) -> String {
let val = self.read_to_eol();
assert!((val.as_bytes()[0] == b'/' && val.as_bytes()[1] == b'/') ||
(val.as_bytes()[0] == b'#' && val.as_bytes()[1] == b'!'));
- return val;
+ val
}
fn consume_non_eol_whitespace(&mut self) {
Symbol::intern("?")
};
self.bump(); // advance ch past token
- return token::Byte(id);
+ token::Byte(id)
}
fn scan_byte_escape(&mut self, delim: char, below_0x7f_only: bool) -> bool {
Symbol::intern("??")
};
self.bump();
- return token::ByteStr(id);
+ token::ByteStr(id)
}
fn scan_raw_byte_string(&mut self) -> token::Lit {
self.bump();
}
self.bump();
- return token::ByteStrRaw(self.name_from_to(content_start_bpos, content_end_bpos),
- hash_count);
+ token::ByteStrRaw(self.name_from_to(content_start_bpos, content_end_bpos),
+ hash_count)
}
}
}
fn is_dec_digit(c: Option<char>) -> bool {
- return in_range(c, '0', '9');
+ in_range(c, '0', '9')
}
pub fn is_doc_comment(s: &str) -> bool {
parser.parse_inner_attributes()
}
-pub fn parse_crate_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
- -> PResult<'a, ast::Crate> {
+pub fn parse_crate_from_source_str(name: String, source: String, sess: &ParseSess)
+ -> PResult<ast::Crate> {
new_parser_from_source_str(sess, name, source).parse_crate_mod()
}
-pub fn parse_crate_attrs_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
- -> PResult<'a, Vec<ast::Attribute>> {
+pub fn parse_crate_attrs_from_source_str(name: String, source: String, sess: &ParseSess)
+ -> PResult<Vec<ast::Attribute>> {
new_parser_from_source_str(sess, name, source).parse_inner_attributes()
}
-pub fn parse_expr_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
- -> PResult<'a, P<ast::Expr>> {
+pub fn parse_expr_from_source_str(name: String, source: String, sess: &ParseSess)
+ -> PResult<P<ast::Expr>> {
new_parser_from_source_str(sess, name, source).parse_expr()
}
///
/// Returns `Ok(Some(item))` when successful, `Ok(None)` when no item was found, and`Err`
/// when a syntax error occurred.
-pub fn parse_item_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
- -> PResult<'a, Option<P<ast::Item>>> {
+pub fn parse_item_from_source_str(name: String, source: String, sess: &ParseSess)
+ -> PResult<Option<P<ast::Item>>> {
new_parser_from_source_str(sess, name, source).parse_item()
}
-pub fn parse_meta_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
- -> PResult<'a, ast::MetaItem> {
+pub fn parse_meta_from_source_str(name: String, source: String, sess: &ParseSess)
+ -> PResult<ast::MetaItem> {
new_parser_from_source_str(sess, name, source).parse_meta_item()
}
-pub fn parse_stmt_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
- -> PResult<'a, Option<ast::Stmt>> {
+pub fn parse_stmt_from_source_str(name: String, source: String, sess: &ParseSess)
+ -> PResult<Option<ast::Stmt>> {
new_parser_from_source_str(sess, name, source).parse_stmt()
}
-pub fn parse_stream_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
+pub fn parse_stream_from_source_str(name: String, source: String, sess: &ParseSess)
-> TokenStream {
filemap_to_stream(sess, sess.codemap().new_filemap(name, source))
}
// Create a new parser from a source string
-pub fn new_parser_from_source_str<'a>(sess: &'a ParseSess, name: String, source: String)
- -> Parser<'a> {
+pub fn new_parser_from_source_str(sess: &ParseSess, name: String, source: String)
+ -> Parser {
filemap_to_parser(sess, sess.codemap().new_filemap(name, source))
}
}
/// Given a filemap and config, return a parser
-pub fn filemap_to_parser<'a>(sess: &'a ParseSess, filemap: Rc<FileMap>, ) -> Parser<'a> {
+pub fn filemap_to_parser(sess: &ParseSess, filemap: Rc<FileMap>) -> Parser {
let end_pos = filemap.end_pos;
let mut parser = stream_to_parser(sess, filemap_to_stream(sess, filemap));
// must preserve old name for now, because quote! from the *existing*
// compiler expands into it
-pub fn new_parser_from_tts<'a>(sess: &'a ParseSess, tts: Vec<TokenTree>) -> Parser<'a> {
+pub fn new_parser_from_tts(sess: &ParseSess, tts: Vec<TokenTree>) -> Parser {
stream_to_parser(sess, tts.into_iter().collect())
}
panictry!(srdr.parse_all_token_trees())
}
-/// Given stream and the ParseSess, produce a parser
-pub fn stream_to_parser<'a>(sess: &'a ParseSess, stream: TokenStream) -> Parser<'a> {
+/// Given stream and the `ParseSess`, produce a parser
+pub fn stream_to_parser(sess: &ParseSess, stream: TokenStream) -> Parser {
Parser::new(sess, stream, None, false)
}
(c, 4)
}
'u' => {
- assert!(lit.as_bytes()[2] == b'{');
+ assert_eq!(lit.as_bytes()[2], b'{');
let idx = lit.find('}').unwrap();
let v = u32::from_str_radix(&lit[3..idx], 16).unwrap();
let c = char::from_u32(v).unwrap();
}
let mut chars = lit.char_indices().peekable();
- loop {
- match chars.next() {
- Some((i, c)) => {
- match c {
- '\\' => {
- let ch = chars.peek().unwrap_or_else(|| {
- panic!("{}", error(i))
- }).1;
-
- if ch == '\n' {
- eat(&mut chars);
- } else if ch == '\r' {
- chars.next();
- let ch = chars.peek().unwrap_or_else(|| {
- panic!("{}", error(i))
- }).1;
-
- if ch != '\n' {
- panic!("lexer accepted bare CR");
- }
- eat(&mut chars);
- } else {
- // otherwise, a normal escape
- let (c, n) = char_lit(&lit[i..]);
- for _ in 0..n - 1 { // we don't need to move past the first \
- chars.next();
- }
- res.push(c);
- }
- },
- '\r' => {
- let ch = chars.peek().unwrap_or_else(|| {
- panic!("{}", error(i))
- }).1;
+ while let Some((i, c)) = chars.next() {
+ match c {
+ '\\' => {
+ let ch = chars.peek().unwrap_or_else(|| {
+ panic!("{}", error(i))
+ }).1;
+
+ if ch == '\n' {
+ eat(&mut chars);
+ } else if ch == '\r' {
+ chars.next();
+ let ch = chars.peek().unwrap_or_else(|| {
+ panic!("{}", error(i))
+ }).1;
- if ch != '\n' {
- panic!("lexer accepted bare CR");
- }
+ if ch != '\n' {
+ panic!("lexer accepted bare CR");
+ }
+ eat(&mut chars);
+ } else {
+ // otherwise, a normal escape
+ let (c, n) = char_lit(&lit[i..]);
+ for _ in 0..n - 1 { // we don't need to move past the first \
chars.next();
- res.push('\n');
}
- c => res.push(c),
+ res.push(c);
}
},
- None => break
+ '\r' => {
+ let ch = chars.peek().unwrap_or_else(|| {
+ panic!("{}", error(i))
+ }).1;
+
+ if ch != '\n' {
+ panic!("lexer accepted bare CR");
+ }
+ chars.next();
+ res.push('\n');
+ }
+ c => res.push(c),
}
}
debug!("raw_str_lit: given {}", escape_default(lit));
let mut res = String::with_capacity(lit.len());
- // FIXME #8372: This could be a for-loop if it didn't borrow the iterator
let mut chars = lit.chars().peekable();
- loop {
- match chars.next() {
- Some(c) => {
- if c == '\r' {
- if *chars.peek().unwrap() != '\n' {
- panic!("lexer accepted bare CR");
- }
- chars.next();
- res.push('\n');
- } else {
- res.push(c);
- }
- },
- None => break
+ while let Some(c) = chars.next() {
+ if c == '\r' {
+ if *chars.peek().unwrap() != '\n' {
+ panic!("lexer accepted bare CR");
+ }
+ chars.next();
+ res.push('\n');
+ } else {
+ res.push(c);
}
}
if lit.len() == 1 {
(lit.as_bytes()[0], 1)
} else {
- assert!(lit.as_bytes()[0] == b'\\', err(0));
+ assert_eq!(lit.as_bytes()[0], b'\\', "{}", err(0));
let b = match lit.as_bytes()[1] {
b'"' => b'"',
b'n' => b'\n',
}
}
};
- return (b, 2);
+ (b, 2)
}
}
let error = |i| format!("lexer should have rejected {} at {}", lit, i);
/// Eat everything up to a non-whitespace
- fn eat<'a, I: Iterator<Item=(usize, u8)>>(it: &mut iter::Peekable<I>) {
+ fn eat<I: Iterator<Item=(usize, u8)>>(it: &mut iter::Peekable<I>) {
loop {
match it.peek().map(|x| x.1) {
Some(b' ') | Some(b'\n') | Some(b'\r') | Some(b'\t') => {
if let Some(err) = err {
err!(diag, |span, diag| diag.span_err(span, err));
}
- return filtered_float_lit(Symbol::intern(&s), Some(suf), diag)
+ return filtered_float_lit(Symbol::intern(s), Some(suf), diag)
}
}
if !self.obsolete_set.contains(&kind) &&
(error || self.sess.span_diagnostic.can_emit_warnings) {
- err.note(&format!("{}", desc));
+ err.note(desc);
self.obsolete_set.insert(kind);
}
err.emit();
fn next_desugared(&mut self) -> TokenAndSpan {
let (sp, name) = match self.next() {
TokenAndSpan { sp, tok: token::DocComment(name) } => (sp, name),
- tok @ _ => return tok,
+ tok => return tok,
};
let stripped = strip_doc_comment_decoration(&name.as_str());
}
impl Error {
- pub fn span_err<'a>(self, sp: Span, handler: &'a errors::Handler) -> DiagnosticBuilder<'a> {
+ pub fn span_err(self, sp: Span, handler: &errors::Handler) -> DiagnosticBuilder {
match self {
Error::FileNotFoundForModule { ref mod_name,
ref default_path,
}
fn next_tok(&mut self) -> TokenAndSpan {
- let mut next = match self.desugar_doc_comments {
- true => self.token_cursor.next_desugared(),
- false => self.token_cursor.next(),
+ let mut next = if self.desugar_doc_comments {
+ self.token_cursor.next_desugared()
+ } else {
+ self.token_cursor.next()
};
if next.sp == syntax_pos::DUMMY_SP {
next.sp = self.prev_span;
// This might be a sign we need a connect method on Iterator.
let b = i.next()
.map_or("".to_string(), |t| t.to_string());
- i.enumerate().fold(b, |mut b, (i, ref a)| {
+ i.enumerate().fold(b, |mut b, (i, a)| {
if tokens.len() > 2 && i == tokens.len() - 2 {
b.push_str(", or ");
} else if tokens.len() == 2 && i == tokens.len() - 2 {
token::CloseDelim(..) | token::Eof => break,
_ => {}
};
- match sep.sep {
- Some(ref t) => {
- if first {
- first = false;
- } else {
- if let Err(e) = self.expect(t) {
- fe(e);
- break;
- }
+ if let Some(ref t) = sep.sep {
+ if first {
+ first = false;
+ } else {
+ if let Err(e) = self.expect(t) {
+ fe(e);
+ break;
}
}
- _ => ()
}
if sep.trailing_sep_allowed && kets.iter().any(|k| self.check(k)) {
break;
let sum_span = ty.span.to(self.prev_span);
let mut err = struct_span_err!(self.sess.span_diagnostic, sum_span, E0178,
- "expected a path on the left-hand side of `+`, not `{}`", pprust::ty_to_string(&ty));
+ "expected a path on the left-hand side of `+`, not `{}`", pprust::ty_to_string(ty));
match ty.node {
TyKind::Rptr(ref lifetime, ref mut_ty) => {
pub fn is_named_argument(&mut self) -> bool {
let offset = match self.token {
- token::BinOp(token::And) => 1,
+ token::BinOp(token::And) |
token::AndAnd => 1,
_ if self.token.is_keyword(keywords::Mut) => 1,
_ => 0
let attrs = self.parse_outer_attributes()?;
let pats = self.parse_pats()?;
- let mut guard = None;
- if self.eat_keyword(keywords::If) {
- guard = Some(self.parse_expr()?);
- }
+ let guard = if self.eat_keyword(keywords::If) {
+ Some(self.parse_expr()?)
+ } else {
+ None
+ };
self.expect(&token::FatArrow)?;
let expr = self.parse_expr_res(RESTRICTION_STMT_EXPR, None)?;
let lo = self.span;
let pat = self.parse_pat()?;
- let mut ty = None;
- if self.eat(&token::Colon) {
- ty = Some(self.parse_ty()?);
- }
+ let ty = if self.eat(&token::Colon) {
+ Some(self.parse_ty()?)
+ } else {
+ None
+ };
let init = self.parse_initializer()?;
Ok(P(ast::Local {
ty: ty,
},
None => {
let unused_attrs = |attrs: &[_], s: &mut Self| {
- if attrs.len() > 0 {
+ if !attrs.is_empty() {
if s.prev_token_kind == PrevTokenKind::DocComment {
s.span_fatal_err(s.prev_span, Error::UselessDocComment).emit();
} else {
self.expect(&token::Not)?;
}
- self.complain_if_pub_macro(&vis, prev_span);
+ self.complain_if_pub_macro(vis, prev_span);
// eat a matched-delimiter token tree:
*at_end = true;
}
}
} else {
- match polarity {
- ast::ImplPolarity::Negative => {
- // This is a negated type implementation
- // `impl !MyType {}`, which is not allowed.
- self.span_err(neg_span, "inherent implementation can't be negated");
- },
- _ => {}
+ if polarity == ast::ImplPolarity::Negative {
+ // This is a negated type implementation
+ // `impl !MyType {}`, which is not allowed.
+ self.span_err(neg_span, "inherent implementation can't be negated");
}
None
};
let path_span = self.prev_span;
let help_msg = format!("make this visible only to module `{}` with `in`:", path);
self.expect(&token::CloseDelim(token::Paren))?; // `)`
- let mut err = self.span_fatal_help(path_span, &msg, &suggestion);
+ let mut err = self.span_fatal_help(path_span, msg, suggestion);
err.span_suggestion(path_span, &help_msg, format!("in {}", path));
err.emit(); // emit diagnostic, but continue with public visibility
}
pub fn len(self) -> usize {
if self == NoDelim { 0 } else { 1 }
}
+
+ pub fn is_empty(self) -> bool {
+ self == NoDelim
+ }
}
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug, Copy)]
pub fn can_begin_expr(&self) -> bool {
match *self {
Ident(ident) => ident_can_begin_expr(ident), // value name or keyword
- OpenDelim(..) => true, // tuple, array or block
- Literal(..) => true, // literal
- Not => true, // operator not
- BinOp(Minus) => true, // unary minus
- BinOp(Star) => true, // dereference
- BinOp(Or) | OrOr => true, // closure
- BinOp(And) => true, // reference
- AndAnd => true, // double reference
- DotDot | DotDotDot => true, // range notation
- Lt | BinOp(Shl) => true, // associated path
- ModSep => true, // global path
+ OpenDelim(..) | // tuple, array or block
+ Literal(..) | // literal
+ Not | // operator not
+ BinOp(Minus) | // unary minus
+ BinOp(Star) | // dereference
+ BinOp(Or) | OrOr | // closure
+ BinOp(And) | // reference
+ AndAnd | // double reference
+ DotDot | DotDotDot | // range notation
+ Lt | BinOp(Shl) | // associated path
+ ModSep | // global path
Pound => true, // expression attributes
Interpolated(ref nt) => match **nt {
NtIdent(..) | NtExpr(..) | NtBlock(..) | NtPath(..) => true,
pub fn can_begin_type(&self) -> bool {
match *self {
Ident(ident) => ident_can_begin_type(ident), // type name or keyword
- OpenDelim(Paren) => true, // tuple
- OpenDelim(Bracket) => true, // array
- Underscore => true, // placeholder
- Not => true, // never
- BinOp(Star) => true, // raw pointer
- BinOp(And) => true, // reference
- AndAnd => true, // double reference
- Question => true, // maybe bound in trait object
- Lifetime(..) => true, // lifetime bound in trait object
- Lt | BinOp(Shl) => true, // associated path
+ OpenDelim(Paren) | // tuple
+ OpenDelim(Bracket) | // array
+ Underscore | // placeholder
+ Not | // never
+ BinOp(Star) | // raw pointer
+ BinOp(And) | // reference
+ AndAnd | // double reference
+ Question | // maybe bound in trait object
+ Lifetime(..) | // lifetime bound in trait object
+ Lt | BinOp(Shl) | // associated path
ModSep => true, // global path
Interpolated(ref nt) => match **nt {
NtIdent(..) | NtTy(..) | NtPath(..) => true,
//! between using 'left' and 'right' terms to denote the wrapped-to-ring-buffer
//! and point-in-infinite-stream senses freely.
//!
-//! There is a parallel ring buffer, 'size', that holds the calculated size of
+//! There is a parallel ring buffer, `size`, that holds the calculated size of
//! each token. Why calculated? Because for Begin/End pairs, the "size"
//! includes everything between the pair. That is, the "size" of Begin is
//! actually the sum of the sizes of everything between Begin and the paired
-//! End that follows. Since that is arbitrarily far in the future, 'size' is
+//! End that follows. Since that is arbitrarily far in the future, `size` is
//! being rewritten regularly while the printer runs; in fact most of the
-//! machinery is here to work out 'size' entries on the fly (and give up when
+//! machinery is here to work out `size` entries on the fly (and give up when
//! they're so obviously over-long that "infinity" is a good enough
//! approximation for purposes of line breaking).
//!
//! The "input side" of the printer is managed as an abstract process called
-//! SCAN, which uses 'scan_stack', to manage calculating 'size'. SCAN is, in
+//! SCAN, which uses `scan_stack`, to manage calculating `size`. SCAN is, in
//! other words, the process of calculating 'size' entries.
//!
//! The "output side" of the printer is managed by an abstract process called
-//! PRINT, which uses 'print_stack', 'margin' and 'space' to figure out what to
+//! PRINT, which uses `print_stack`, `margin` and `space` to figure out what to
//! do with each token/size pair it consumes as it goes. It's trying to consume
//! the entire buffered window, but can't output anything until the size is >=
//! 0 (sizes are set to negative while they're pending calculation).
pub fn advance_right(&mut self) {
self.right += 1;
self.right %= self.buf_len;
- assert!(self.right != self.left);
+ assert_ne!(self.right, self.left);
}
pub fn advance_left(&mut self) -> io::Result<()> {
debug!("advance_left Vec<{},{}>, sizeof({})={}", self.left, self.right,
token::CloseDelim(token::Bracket) => "]".to_string(),
token::OpenDelim(token::Brace) => "{".to_string(),
token::CloseDelim(token::Brace) => "}".to_string(),
- token::OpenDelim(token::NoDelim) => " ".to_string(),
+ token::OpenDelim(token::NoDelim) |
token::CloseDelim(token::NoDelim) => " ".to_string(),
token::Pound => "#".to_string(),
token::Dollar => "$".to_string(),
let mut out = match lit {
token::Byte(b) => format!("b'{}'", b),
token::Char(c) => format!("'{}'", c),
- token::Float(c) => c.to_string(),
+ token::Float(c) |
token::Integer(c) => c.to_string(),
token::Str_(s) => format!("\"{}\"", s),
token::StrRaw(s, n) => format!("r{delim}\"{string}\"{delim}",
token::Shebang(s) => format!("/* shebang: {}*/", s),
token::Interpolated(ref nt) => match **nt {
- token::NtExpr(ref e) => expr_to_string(&e),
- token::NtMeta(ref e) => meta_item_to_string(&e),
- token::NtTy(ref e) => ty_to_string(&e),
- token::NtPath(ref e) => path_to_string(&e),
- token::NtItem(ref e) => item_to_string(&e),
- token::NtBlock(ref e) => block_to_string(&e),
- token::NtStmt(ref e) => stmt_to_string(&e),
- token::NtPat(ref e) => pat_to_string(&e),
+ token::NtExpr(ref e) => expr_to_string(e),
+ token::NtMeta(ref e) => meta_item_to_string(e),
+ token::NtTy(ref e) => ty_to_string(e),
+ token::NtPath(ref e) => path_to_string(e),
+ token::NtItem(ref e) => item_to_string(e),
+ token::NtBlock(ref e) => block_to_string(e),
+ token::NtStmt(ref e) => stmt_to_string(e),
+ token::NtPat(ref e) => pat_to_string(e),
token::NtIdent(ref e) => ident_to_string(e.node),
token::NtTT(ref tree) => tt_to_string(tree.clone()),
- token::NtArm(ref e) => arm_to_string(&e),
- token::NtImplItem(ref e) => impl_item_to_string(&e),
- token::NtTraitItem(ref e) => trait_item_to_string(&e),
- token::NtGenerics(ref e) => generics_to_string(&e),
- token::NtWhereClause(ref e) => where_clause_to_string(&e),
- token::NtArg(ref e) => arg_to_string(&e),
- token::NtVis(ref e) => vis_to_string(&e),
+ token::NtArm(ref e) => arm_to_string(e),
+ token::NtImplItem(ref e) => impl_item_to_string(e),
+ token::NtTraitItem(ref e) => trait_item_to_string(e),
+ token::NtGenerics(ref e) => generics_to_string(e),
+ token::NtWhereClause(ref e) => where_clause_to_string(e),
+ token::NtArg(ref e) => arg_to_string(e),
+ token::NtVis(ref e) => vis_to_string(e),
}
}
}
let mut result = None;
- if let &Some(ref lits) = self.literals()
- {
+ if let Some(ref lits) = *self.literals() {
while cur_lit < lits.len() {
let ltrl = (*lits)[cur_lit].clone();
if ltrl.pos > pos { break; }
fn print_literal(&mut self, lit: &ast::Lit) -> io::Result<()> {
self.maybe_print_comment(lit.span.lo)?;
- match self.next_lit(lit.span.lo) {
- Some(ref ltrl) => {
- return word(self.writer(), &(*ltrl).lit);
- }
- _ => ()
+ if let Some(ref ltrl) = self.next_lit(lit.span.lo) {
+ return word(self.writer(), &(*ltrl).lit);
}
match lit.node {
ast::LitKind::Str(st, style) => self.print_string(&st.as_str(), style),
self.popen()?;
self.commasep(Consistent,
&items[..],
- |s, i| s.print_meta_list_item(&i))?;
+ |s, i| s.print_meta_list_item(i))?;
self.pclose()?;
}
}
pub fn commasep_exprs(&mut self, b: Breaks,
exprs: &[P<ast::Expr>]) -> io::Result<()> {
- self.commasep_cmnt(b, exprs, |s, e| s.print_expr(&e), |e| e.span)
+ self.commasep_cmnt(b, exprs, |s, e| s.print_expr(e), |e| e.span)
}
pub fn print_mod(&mut self, _mod: &ast::Mod,
attrs: &[ast::Attribute]) -> io::Result<()> {
self.print_inner_attributes(attrs)?;
for item in &_mod.items {
- self.print_item(&item)?;
+ self.print_item(item)?;
}
Ok(())
}
match ty.node {
ast::TyKind::Slice(ref ty) => {
word(&mut self.s, "[")?;
- self.print_type(&ty)?;
+ self.print_type(ty)?;
word(&mut self.s, "]")?;
}
ast::TyKind::Ptr(ref mt) => {
ast::TyKind::Tup(ref elts) => {
self.popen()?;
self.commasep(Inconsistent, &elts[..],
- |s, ty| s.print_type(&ty))?;
+ |s, ty| s.print_type(ty))?;
if elts.len() == 1 {
word(&mut self.s, ",")?;
}
}
ast::TyKind::Paren(ref typ) => {
self.popen()?;
- self.print_type(&typ)?;
+ self.print_type(typ)?;
self.pclose()?;
}
ast::TyKind::BareFn(ref f) => {
}
ast::TyKind::Array(ref ty, ref v) => {
word(&mut self.s, "[")?;
- self.print_type(&ty)?;
+ self.print_type(ty)?;
word(&mut self.s, "; ")?;
- self.print_expr(&v)?;
+ self.print_expr(v)?;
word(&mut self.s, "]")?;
}
ast::TyKind::Typeof(ref e) => {
word(&mut self.s, "typeof(")?;
- self.print_expr(&e)?;
+ self.print_expr(e)?;
word(&mut self.s, ")")?;
}
ast::TyKind::Infer => {
}
self.print_ident(item.ident)?;
self.word_space(":")?;
- self.print_type(&t)?;
+ self.print_type(t)?;
word(&mut self.s, ";")?;
self.end()?; // end the head-ibox
self.end() // end the outer cbox
self.head(&visibility_qualified(&item.vis, "extern crate"))?;
if let Some(p) = *optional_path {
let val = p.as_str();
- if val.contains("-") {
+ if val.contains('-') {
self.print_string(&val, ast::StrStyle::Cooked)?;
} else {
self.print_name(p)?;
}
ast::ItemKind::Use(ref vp) => {
self.head(&visibility_qualified(&item.vis, "use"))?;
- self.print_view_path(&vp)?;
+ self.print_view_path(vp)?;
word(&mut self.s, ";")?;
self.end()?; // end inner head-block
self.end()?; // end outer head-block
}
self.print_ident(item.ident)?;
self.word_space(":")?;
- self.print_type(&ty)?;
+ self.print_type(ty)?;
space(&mut self.s)?;
self.end()?; // end the head-ibox
self.word_space("=")?;
- self.print_expr(&expr)?;
+ self.print_expr(expr)?;
word(&mut self.s, ";")?;
self.end()?; // end the outer cbox
}
self.head(&visibility_qualified(&item.vis, "const"))?;
self.print_ident(item.ident)?;
self.word_space(":")?;
- self.print_type(&ty)?;
+ self.print_type(ty)?;
space(&mut self.s)?;
self.end()?; // end the head-ibox
self.word_space("=")?;
- self.print_expr(&expr)?;
+ self.print_expr(expr)?;
word(&mut self.s, ";")?;
self.end()?; // end the outer cbox
}
&item.vis
)?;
word(&mut self.s, " ")?;
- self.print_block_with_attrs(&body, &item.attrs)?;
+ self.print_block_with_attrs(body, &item.attrs)?;
}
ast::ItemKind::Mod(ref _mod) => {
self.head(&visibility_qualified(&item.vis, "mod"))?;
self.print_where_clause(¶ms.where_clause)?;
space(&mut self.s)?;
self.word_space("=")?;
- self.print_type(&ty)?;
+ self.print_type(ty)?;
word(&mut self.s, ";")?;
self.end()?; // end the outer ibox
}
}
ast::ItemKind::Struct(ref struct_def, ref generics) => {
self.head(&visibility_qualified(&item.vis, "struct"))?;
- self.print_struct(&struct_def, generics, item.ident, item.span, true)?;
+ self.print_struct(struct_def, generics, item.ident, item.span, true)?;
}
ast::ItemKind::Union(ref struct_def, ref generics) => {
self.head(&visibility_qualified(&item.vis, "union"))?;
- self.print_struct(&struct_def, generics, item.ident, item.span, true)?;
+ self.print_struct(struct_def, generics, item.ident, item.span, true)?;
}
ast::ItemKind::DefaultImpl(unsafety, ref trait_ref) => {
self.head("")?;
space(&mut self.s)?;
}
- match polarity {
- ast::ImplPolarity::Negative => {
- word(&mut self.s, "!")?;
- },
- _ => {}
+ if polarity == ast::ImplPolarity::Negative {
+ word(&mut self.s, "!")?;
}
if let Some(ref t) = *opt_trait {
self.word_space("for")?;
}
- self.print_type(&ty)?;
+ self.print_type(ty)?;
self.print_where_clause(&generics.where_clause)?;
space(&mut self.s)?;
Some(ref d) => {
space(&mut self.s)?;
self.word_space("=")?;
- self.print_expr(&d)
+ self.print_expr(d)
}
_ => Ok(())
}
self.print_outer_attributes(&ti.attrs)?;
match ti.node {
ast::TraitItemKind::Const(ref ty, ref default) => {
- self.print_associated_const(ti.ident, &ty,
+ self.print_associated_const(ti.ident, ty,
default.as_ref().map(|expr| &**expr),
&ast::Visibility::Inherited)?;
}
self.print_defaultness(ii.defaultness)?;
match ii.node {
ast::ImplItemKind::Const(ref ty, ref expr) => {
- self.print_associated_const(ii.ident, &ty, Some(&expr), &ii.vis)?;
+ self.print_associated_const(ii.ident, ty, Some(expr), &ii.vis)?;
}
ast::ImplItemKind::Method(ref sig, ref body) => {
self.head("")?;
self.word_nbsp("let")?;
self.ibox(INDENT_UNIT)?;
- self.print_local_decl(&loc)?;
+ self.print_local_decl(loc)?;
self.end()?;
if let Some(ref init) = loc.init {
self.nbsp()?;
self.word_space("=")?;
- self.print_expr(&init)?;
+ self.print_expr(init)?;
}
word(&mut self.s, ";")?;
self.end()?;
}
- ast::StmtKind::Item(ref item) => self.print_item(&item)?,
+ ast::StmtKind::Item(ref item) => self.print_item(item)?,
ast::StmtKind::Expr(ref expr) => {
self.space_if_not_bol()?;
- self.print_expr_outer_attr_style(&expr, false)?;
+ self.print_expr_outer_attr_style(expr, false)?;
if parse::classify::expr_requires_semi_to_be_stmt(expr) {
word(&mut self.s, ";")?;
}
}
ast::StmtKind::Semi(ref expr) => {
self.space_if_not_bol()?;
- self.print_expr_outer_attr_style(&expr, false)?;
+ self.print_expr_outer_attr_style(expr, false)?;
word(&mut self.s, ";")?;
}
ast::StmtKind::Mac(ref mac) => {
let (ref mac, style, ref attrs) = **mac;
self.space_if_not_bol()?;
- self.print_outer_attributes(&attrs)?;
+ self.print_outer_attributes(attrs)?;
let delim = match style {
ast::MacStmtStyle::Braces => token::Brace,
_ => token::Paren
};
- self.print_mac(&mac, delim)?;
+ self.print_mac(mac, delim)?;
if style == ast::MacStmtStyle::Semicolon {
word(&mut self.s, ";")?;
}
ast::StmtKind::Expr(ref expr) if i == blk.stmts.len() - 1 => {
self.maybe_print_comment(st.span.lo)?;
self.space_if_not_bol()?;
- self.print_expr_outer_attr_style(&expr, false)?;
+ self.print_expr_outer_attr_style(expr, false)?;
self.maybe_print_trailing_comment(expr.span, Some(blk.span.hi))?;
}
_ => self.print_stmt(st)?,
self.cbox(INDENT_UNIT - 1)?;
self.ibox(0)?;
word(&mut self.s, " else if ")?;
- self.print_expr(&i)?;
+ self.print_expr(i)?;
space(&mut self.s)?;
- self.print_block(&then)?;
+ self.print_block(then)?;
self.print_else(e.as_ref().map(|e| &**e))
}
// "another else-if-let"
self.cbox(INDENT_UNIT - 1)?;
self.ibox(0)?;
word(&mut self.s, " else if let ")?;
- self.print_pat(&pat)?;
+ self.print_pat(pat)?;
space(&mut self.s)?;
self.word_space("=")?;
- self.print_expr(&expr)?;
+ self.print_expr(expr)?;
space(&mut self.s)?;
- self.print_block(&then)?;
+ self.print_block(then)?;
self.print_else(e.as_ref().map(|e| &**e))
}
// "final else"
self.cbox(INDENT_UNIT - 1)?;
self.ibox(0)?;
word(&mut self.s, " else ")?;
- self.print_block(&b)
+ self.print_block(b)
}
// BLEAH, constraints would be great here
_ => {
binop: ast::BinOp) -> bool {
match sub_expr.node {
ast::ExprKind::Binary(ref sub_op, _, _) => {
- if AssocOp::from_ast_binop(sub_op.node).precedence() <
- AssocOp::from_ast_binop(binop.node).precedence() {
- true
- } else {
- false
- }
+ AssocOp::from_ast_binop(sub_op.node).precedence() <
+ AssocOp::from_ast_binop(binop.node).precedence()
}
_ => true
}
space(&mut self.s)?;
}
word(&mut self.s, "..")?;
- self.print_expr(&expr)?;
+ self.print_expr(expr)?;
self.end()?;
}
_ => if !fields.is_empty() {
if !tys.is_empty() {
word(&mut self.s, "::<")?;
self.commasep(Inconsistent, tys,
- |s, ty| s.print_type(&ty))?;
+ |s, ty| s.print_type(ty))?;
word(&mut self.s, ">")?;
}
self.print_call_post(base_args)
self.print_expr_vec(&exprs[..], attrs)?;
}
ast::ExprKind::Repeat(ref element, ref count) => {
- self.print_expr_repeat(&element, &count, attrs)?;
+ self.print_expr_repeat(element, count, attrs)?;
}
ast::ExprKind::Struct(ref path, ref fields, ref wth) => {
self.print_expr_struct(path, &fields[..], wth, attrs)?;
self.print_expr_tup(&exprs[..], attrs)?;
}
ast::ExprKind::Call(ref func, ref args) => {
- self.print_expr_call(&func, &args[..])?;
+ self.print_expr_call(func, &args[..])?;
}
ast::ExprKind::MethodCall(ident, ref tys, ref args) => {
self.print_expr_method_call(ident, &tys[..], &args[..])?;
}
ast::ExprKind::Binary(op, ref lhs, ref rhs) => {
- self.print_expr_binary(op, &lhs, &rhs)?;
+ self.print_expr_binary(op, lhs, rhs)?;
}
ast::ExprKind::Unary(op, ref expr) => {
- self.print_expr_unary(op, &expr)?;
+ self.print_expr_unary(op, expr)?;
}
ast::ExprKind::AddrOf(m, ref expr) => {
- self.print_expr_addr_of(m, &expr)?;
+ self.print_expr_addr_of(m, expr)?;
}
ast::ExprKind::Lit(ref lit) => {
- self.print_literal(&lit)?;
+ self.print_literal(lit)?;
}
ast::ExprKind::Cast(ref expr, ref ty) => {
if let ast::ExprKind::Cast(..) = expr.node {
- self.print_expr(&expr)?;
+ self.print_expr(expr)?;
} else {
- self.print_expr_maybe_paren(&expr)?;
+ self.print_expr_maybe_paren(expr)?;
}
space(&mut self.s)?;
self.word_space("as")?;
- self.print_type(&ty)?;
+ self.print_type(ty)?;
}
ast::ExprKind::Type(ref expr, ref ty) => {
- self.print_expr(&expr)?;
+ self.print_expr(expr)?;
self.word_space(":")?;
- self.print_type(&ty)?;
+ self.print_type(ty)?;
}
ast::ExprKind::If(ref test, ref blk, ref elseopt) => {
- self.print_if(&test, &blk, elseopt.as_ref().map(|e| &**e))?;
+ self.print_if(test, blk, elseopt.as_ref().map(|e| &**e))?;
}
ast::ExprKind::IfLet(ref pat, ref expr, ref blk, ref elseopt) => {
- self.print_if_let(&pat, &expr, &blk, elseopt.as_ref().map(|e| &**e))?;
+ self.print_if_let(pat, expr, blk, elseopt.as_ref().map(|e| &**e))?;
}
ast::ExprKind::While(ref test, ref blk, opt_ident) => {
if let Some(ident) = opt_ident {
self.word_space(":")?;
}
self.head("while")?;
- self.print_expr(&test)?;
+ self.print_expr(test)?;
space(&mut self.s)?;
- self.print_block_with_attrs(&blk, attrs)?;
+ self.print_block_with_attrs(blk, attrs)?;
}
ast::ExprKind::WhileLet(ref pat, ref expr, ref blk, opt_ident) => {
if let Some(ident) = opt_ident {
self.word_space(":")?;
}
self.head("while let")?;
- self.print_pat(&pat)?;
+ self.print_pat(pat)?;
space(&mut self.s)?;
self.word_space("=")?;
- self.print_expr(&expr)?;
+ self.print_expr(expr)?;
space(&mut self.s)?;
- self.print_block_with_attrs(&blk, attrs)?;
+ self.print_block_with_attrs(blk, attrs)?;
}
ast::ExprKind::ForLoop(ref pat, ref iter, ref blk, opt_ident) => {
if let Some(ident) = opt_ident {
self.word_space(":")?;
}
self.head("for")?;
- self.print_pat(&pat)?;
+ self.print_pat(pat)?;
space(&mut self.s)?;
self.word_space("in")?;
- self.print_expr(&iter)?;
+ self.print_expr(iter)?;
space(&mut self.s)?;
- self.print_block_with_attrs(&blk, attrs)?;
+ self.print_block_with_attrs(blk, attrs)?;
}
ast::ExprKind::Loop(ref blk, opt_ident) => {
if let Some(ident) = opt_ident {
}
self.head("loop")?;
space(&mut self.s)?;
- self.print_block_with_attrs(&blk, attrs)?;
+ self.print_block_with_attrs(blk, attrs)?;
}
ast::ExprKind::Match(ref expr, ref arms) => {
self.cbox(INDENT_UNIT)?;
self.ibox(4)?;
self.word_nbsp("match")?;
- self.print_expr(&expr)?;
+ self.print_expr(expr)?;
space(&mut self.s)?;
self.bopen()?;
self.print_inner_attributes_no_trailing_hardbreak(attrs)?;
ast::ExprKind::Closure(capture_clause, ref decl, ref body, _) => {
self.print_capture_clause(capture_clause)?;
- self.print_fn_block_args(&decl)?;
+ self.print_fn_block_args(decl)?;
space(&mut self.s)?;
self.print_expr(body)?;
self.end()?; // need to close a box
self.cbox(INDENT_UNIT)?;
// head-box, will be closed by print-block after {
self.ibox(0)?;
- self.print_block_with_attrs(&blk, attrs)?;
+ self.print_block_with_attrs(blk, attrs)?;
}
ast::ExprKind::Assign(ref lhs, ref rhs) => {
- self.print_expr(&lhs)?;
+ self.print_expr(lhs)?;
space(&mut self.s)?;
self.word_space("=")?;
- self.print_expr(&rhs)?;
+ self.print_expr(rhs)?;
}
ast::ExprKind::AssignOp(op, ref lhs, ref rhs) => {
- self.print_expr(&lhs)?;
+ self.print_expr(lhs)?;
space(&mut self.s)?;
word(&mut self.s, op.node.to_string())?;
self.word_space("=")?;
- self.print_expr(&rhs)?;
+ self.print_expr(rhs)?;
}
ast::ExprKind::Field(ref expr, id) => {
- self.print_expr(&expr)?;
+ self.print_expr(expr)?;
word(&mut self.s, ".")?;
self.print_ident(id.node)?;
}
ast::ExprKind::TupField(ref expr, id) => {
- self.print_expr(&expr)?;
+ self.print_expr(expr)?;
word(&mut self.s, ".")?;
self.print_usize(id.node)?;
}
ast::ExprKind::Index(ref expr, ref index) => {
- self.print_expr(&expr)?;
+ self.print_expr(expr)?;
word(&mut self.s, "[")?;
- self.print_expr(&index)?;
+ self.print_expr(index)?;
word(&mut self.s, "]")?;
}
ast::ExprKind::Range(ref start, ref end, limits) => {
- if let &Some(ref e) = start {
- self.print_expr(&e)?;
+ if let Some(ref e) = *start {
+ self.print_expr(e)?;
}
if limits == ast::RangeLimits::HalfOpen {
word(&mut self.s, "..")?;
} else {
word(&mut self.s, "...")?;
}
- if let &Some(ref e) = end {
- self.print_expr(&e)?;
+ if let Some(ref e) = *end {
+ self.print_expr(e)?;
}
}
ast::ExprKind::Path(None, ref path) => {
}
ast::ExprKind::Ret(ref result) => {
word(&mut self.s, "return")?;
- match *result {
- Some(ref expr) => {
- word(&mut self.s, " ")?;
- self.print_expr(&expr)?;
- }
- _ => ()
+ if let Some(ref expr) = *result {
+ word(&mut self.s, " ")?;
+ self.print_expr(expr)?;
}
}
ast::ExprKind::InlineAsm(ref a) => {
self.commasep(Inconsistent, &a.inputs, |s, &(co, ref o)| {
s.print_string(&co.as_str(), ast::StrStyle::Cooked)?;
s.popen()?;
- s.print_expr(&o)?;
+ s.print_expr(o)?;
s.pclose()?;
Ok(())
})?;
ast::ExprKind::Paren(ref e) => {
self.popen()?;
self.print_inner_attributes_inline(attrs)?;
- self.print_expr(&e)?;
+ self.print_expr(e)?;
self.pclose()?;
},
ast::ExprKind::Try(ref e) => {
ast::ExprKind::Catch(ref blk) => {
self.head("do catch")?;
space(&mut self.s)?;
- self.print_block_with_attrs(&blk, attrs)?
+ self.print_block_with_attrs(blk, attrs)?
}
}
self.ann.post(self, NodeExpr(expr))?;
self.print_pat(&loc.pat)?;
if let Some(ref ty) = loc.ty {
self.word_space(":")?;
- self.print_type(&ty)?;
+ self.print_type(ty)?;
}
Ok(())
}
space(&mut self.s)?;
self.word_space("as")?;
let depth = path.segments.len() - qself.position;
- self.print_path(&path, false, depth, false)?;
+ self.print_path(path, false, depth, false)?;
}
word(&mut self.s, ">")?;
word(&mut self.s, "::")?;
self.commasep(
Inconsistent,
&data.types,
- |s, ty| s.print_type(&ty))?;
+ |s, ty| s.print_type(ty))?;
comma = true;
}
self.commasep(
Inconsistent,
&data.inputs,
- |s, ty| s.print_type(&ty))?;
+ |s, ty| s.print_type(ty))?;
word(&mut self.s, ")")?;
if let Some(ref ty) = data.output {
self.space_if_not_bol()?;
self.word_space("->")?;
- self.print_type(&ty)?;
+ self.print_type(ty)?;
}
}
}
self.print_ident(path1.node)?;
if let Some(ref p) = *sub {
word(&mut self.s, "@")?;
- self.print_pat(&p)?;
+ self.print_pat(p)?;
}
}
PatKind::TupleStruct(ref path, ref elts, ddpos) => {
self.print_path(path, true, 0, false)?;
self.popen()?;
if let Some(ddpos) = ddpos {
- self.commasep(Inconsistent, &elts[..ddpos], |s, p| s.print_pat(&p))?;
+ self.commasep(Inconsistent, &elts[..ddpos], |s, p| s.print_pat(p))?;
if ddpos != 0 {
self.word_space(",")?;
}
word(&mut self.s, "..")?;
if ddpos != elts.len() {
word(&mut self.s, ",")?;
- self.commasep(Inconsistent, &elts[ddpos..], |s, p| s.print_pat(&p))?;
+ self.commasep(Inconsistent, &elts[ddpos..], |s, p| s.print_pat(p))?;
}
} else {
- self.commasep(Inconsistent, &elts[..], |s, p| s.print_pat(&p))?;
+ self.commasep(Inconsistent, &elts[..], |s, p| s.print_pat(p))?;
}
self.pclose()?;
}
PatKind::Tuple(ref elts, ddpos) => {
self.popen()?;
if let Some(ddpos) = ddpos {
- self.commasep(Inconsistent, &elts[..ddpos], |s, p| s.print_pat(&p))?;
+ self.commasep(Inconsistent, &elts[..ddpos], |s, p| s.print_pat(p))?;
if ddpos != 0 {
self.word_space(",")?;
}
word(&mut self.s, "..")?;
if ddpos != elts.len() {
word(&mut self.s, ",")?;
- self.commasep(Inconsistent, &elts[ddpos..], |s, p| s.print_pat(&p))?;
+ self.commasep(Inconsistent, &elts[ddpos..], |s, p| s.print_pat(p))?;
}
} else {
- self.commasep(Inconsistent, &elts[..], |s, p| s.print_pat(&p))?;
+ self.commasep(Inconsistent, &elts[..], |s, p| s.print_pat(p))?;
if elts.len() == 1 {
word(&mut self.s, ",")?;
}
}
PatKind::Box(ref inner) => {
word(&mut self.s, "box ")?;
- self.print_pat(&inner)?;
+ self.print_pat(inner)?;
}
PatKind::Ref(ref inner, mutbl) => {
word(&mut self.s, "&")?;
if mutbl == ast::Mutability::Mutable {
word(&mut self.s, "mut ")?;
}
- self.print_pat(&inner)?;
+ self.print_pat(inner)?;
}
PatKind::Lit(ref e) => self.print_expr(&**e)?,
PatKind::Range(ref begin, ref end, ref end_kind) => {
- self.print_expr(&begin)?;
+ self.print_expr(begin)?;
space(&mut self.s)?;
match *end_kind {
RangeEnd::Included => word(&mut self.s, "...")?,
RangeEnd::Excluded => word(&mut self.s, "..")?,
}
- self.print_expr(&end)?;
+ self.print_expr(end)?;
}
PatKind::Slice(ref before, ref slice, ref after) => {
word(&mut self.s, "[")?;
self.commasep(Inconsistent,
&before[..],
- |s, p| s.print_pat(&p))?;
+ |s, p| s.print_pat(p))?;
if let Some(ref p) = *slice {
if !before.is_empty() { self.word_space(",")?; }
if p.node != PatKind::Wild {
- self.print_pat(&p)?;
+ self.print_pat(p)?;
}
word(&mut self.s, "..")?;
if !after.is_empty() { self.word_space(",")?; }
}
self.commasep(Inconsistent,
&after[..],
- |s, p| s.print_pat(&p))?;
+ |s, p| s.print_pat(p))?;
word(&mut self.s, "]")?;
}
PatKind::Mac(ref m) => self.print_mac(m, token::Paren)?,
space(&mut self.s)?;
self.word_space("|")?;
}
- self.print_pat(&p)?;
+ self.print_pat(p)?;
}
space(&mut self.s)?;
if let Some(ref e) = arm.guard {
self.word_space("if")?;
- self.print_expr(&e)?;
+ self.print_expr(e)?;
space(&mut self.s)?;
}
self.word_space("=>")?;
match arm.body.node {
ast::ExprKind::Block(ref blk) => {
// the block will close the pattern's ibox
- self.print_block_unclosed_indent(&blk, INDENT_UNIT)?;
+ self.print_block_unclosed_indent(blk, INDENT_UNIT)?;
// If it is a user-provided unsafe block, print a comma after it
if let BlockCheckMode::Unsafe(ast::UserProvided) = blk.rules {
self.print_mutability(m)?;
word(&mut self.s, "self")?;
self.word_space(":")?;
- self.print_type(&typ)
+ self.print_type(typ)
}
}
}
self.word_space("->")?;
match decl.output {
ast::FunctionRetTy::Ty(ref ty) => {
- self.print_type(&ty)?;
+ self.print_type(ty)?;
self.maybe_print_comment(ty.span.lo)
}
ast::FunctionRetTy::Default(..) => unreachable!(),
Some(ref default) => {
space(&mut self.s)?;
self.word_space("=")?;
- self.print_type(&default)
+ self.print_type(default)
}
_ => Ok(())
}
ref bounds,
..}) => {
self.print_formal_lifetime_list(bound_lifetimes)?;
- self.print_type(&bounded_ty)?;
+ self.print_type(bounded_ty)?;
self.print_bounds(":", bounds)?;
}
ast::WherePredicate::RegionPredicate(ast::WhereRegionPredicate{ref lifetime,
match decl.output {
ast::FunctionRetTy::Default(..) => unreachable!(),
ast::FunctionRetTy::Ty(ref ty) =>
- self.print_type(&ty)?
+ self.print_type(ty)?
}
self.end()?;
if self.next_comment().is_none() {
hardbreak(&mut self.s)?;
}
- loop {
- match self.next_comment() {
- Some(ref cmnt) => {
- self.print_comment(cmnt)?;
- self.cur_cmnt_and_lit.cur_cmnt += 1;
- }
- _ => break
- }
+ while let Some(ref cmnt) = self.next_comment() {
+ self.print_comment(cmnt)?;
+ self.cur_cmnt_and_lit.cur_cmnt += 1;
}
Ok(())
}
use tokenstream::TokenStream;
/// Craft a span that will be ignored by the stability lint's
-/// call to codemap's is_internal check.
+/// call to codemap's `is_internal` check.
/// The expanded code uses the unstable `#[prelude_import]` attribute.
fn ignored_span(sp: Span) -> Span {
let mark = Mark::fresh();
None => return krate,
};
- let crate_name = Symbol::intern(&alt_std_name.unwrap_or(name.to_string()));
+ let crate_name = Symbol::intern(&alt_std_name.unwrap_or_else(|| name.to_string()));
krate.module.items.insert(0, P(ast::Item {
attrs: vec![attr::mk_attr_outer(DUMMY_SP,
// Add a special __test module to the crate that will contain code
// generated for the test harness
let (mod_, reexport) = mk_test_module(&mut self.cx);
- match reexport {
- Some(re) => folded.module.items.push(re),
- None => {}
+ if let Some(re) = reexport {
+ folded.module.items.push(re)
}
folded.module.items.push(mod_);
folded
let parent = if parent == ast::DUMMY_NODE_ID { ast::CRATE_NODE_ID } else { parent };
cx.ext_cx.current_expansion.mark = cx.ext_cx.resolver.get_module_scope(parent);
let it = cx.ext_cx.monotonic_expander().fold_item(P(ast::Item {
- ident: sym.clone(),
+ ident: sym,
attrs: Vec::new(),
id: ast::DUMMY_NODE_ID,
node: ast::ItemKind::Mod(reexport_mod),
}
/// Craft a span that will be ignored by the stability lint's
-/// call to codemap's is_internal check.
+/// call to codemap's `is_internal` check.
/// The expanded code calls some unstable functions in the test crate.
fn ignored_span(cx: &TestCtxt, sp: Span) -> Span {
Span { ctxt: cx.ctxt, ..sp }
}
}
- return has_test_attr && has_test_signature(i) == Yes;
+ has_test_attr && has_test_signature(i) == Yes
}
fn is_bench_fn(cx: &TestCtxt, i: &ast::Item) -> bool {
`fn(&mut Bencher) -> ()`");
}
- return has_bench_attr && has_test_signature(i);
+ has_bench_attr && has_test_signature(i)
}
fn is_ignored(i: &ast::Item) -> bool {
ast::Unsafety::Normal,
dummy_spanned(ast::Constness::NotConst),
::abi::Abi::Rust, ast::Generics::default(), main_body);
- let main = P(ast::Item {
+ P(ast::Item {
ident: Ident::from_str("main"),
attrs: vec![main_attr],
id: ast::DUMMY_NODE_ID,
node: main,
vis: ast::Visibility::Public,
span: sp
- });
-
- return main;
+ })
}
fn mk_test_module(cx: &mut TestCtxt) -> (P<ast::Item>, Option<P<ast::Item>>) {
//! # Token Streams
//!
-//! TokenStreams represent syntactic objects before they are converted into ASTs.
+//! `TokenStream`s represent syntactic objects before they are converted into ASTs.
//! A `TokenStream` is, roughly speaking, a sequence (eg stream) of `TokenTree`s,
//! which are themselves a single `Token` or a `Delimited` subsequence of tokens.
//!
//! ## Ownership
-//! TokenStreams are persistent data structures constructed as ropes with reference
-//! counted-children. In general, this means that calling an operation on a TokenStream
-//! (such as `slice`) produces an entirely new TokenStream from the borrowed reference to
-//! the original. This essentially coerces TokenStreams into 'views' of their subparts,
-//! and a borrowed TokenStream is sufficient to build an owned TokenStream without taking
+//! `TokenStream`s are persistent data structures constructed as ropes with
+//! reference-counted children. In general, this means that calling an operation on a `TokenStream`
+//! (such as `slice`) produces an entirely new `TokenStream` from the borrowed reference to
+//! the original. This essentially coerces `TokenStream`s into 'views' of their subparts,
+//! and a borrowed `TokenStream` is sufficient to build an owned `TokenStream` without taking
//! ownership of the original.
use syntax_pos::{BytePos, Span, DUMMY_SP};
/// If the syntax extension is an MBE macro, it will attempt to match its
/// LHS token tree against the provided token tree, and if it finds a
/// match, will transcribe the RHS token tree, splicing in any captured
-/// macro_parser::matched_nonterminals into the `SubstNt`s it finds.
+/// `macro_parser::matched_nonterminals` into the `SubstNt`s it finds.
///
/// The RHS of an MBE macro is the only place `SubstNt`s are substituted.
/// Nothing special happens to misnamed or misplaced `SubstNt`s.
iter_names
.filter_map(|&name| {
let dist = lev_distance(lookup, &name.as_str());
- match dist <= max_dist { // filter the unwanted cases
- true => Some((name, dist)),
- false => None,
+ if dist <= max_dist { // filter the unwanted cases
+ Some((name, dist))
+ } else {
+ None
}
})
.min_by_key(|&(_, val)| val) // extract the tuple containing the minimum edit distance
// move the read_i'th item out of the vector and map it
// to an iterator
let e = ptr::read(self.get_unchecked(read_i));
- let mut iter = f(e).into_iter();
+ let iter = f(e).into_iter();
read_i += 1;
- while let Some(e) = iter.next() {
+ for e in iter {
if write_i < read_i {
ptr::write(self.get_unchecked_mut(write_i), e);
write_i += 1;
// move the read_i'th item out of the vector and map it
// to an iterator
let e = ptr::read(self.get_unchecked(read_i));
- let mut iter = f(e).into_iter();
+ let iter = f(e).into_iter();
read_i += 1;
- while let Some(e) = iter.next() {
+ for e in iter {
if write_i < read_i {
ptr::write(self.get_unchecked_mut(write_i), e);
write_i += 1;
visitor.visit_ty(ty);
visitor.visit_expr(expression)
}
- TyKind::TraitObject(ref bounds) => {
- walk_list!(visitor, visit_ty_param_bound, bounds);
- }
+ TyKind::TraitObject(ref bounds) |
TyKind::ImplTrait(ref bounds) => {
walk_list!(visitor, visit_ty_param_bound, bounds);
}
walk_fn_decl(visitor, declaration);
visitor.visit_block(body);
}
- FnKind::Method(_, ref sig, _, body) => {
+ FnKind::Method(_, sig, _, body) => {
visitor.visit_generics(&sig.generics);
walk_fn_decl(visitor, declaration);
visitor.visit_block(body);
}
ExprKind::InlineAsm(ref ia) => {
for &(_, ref input) in &ia.inputs {
- visitor.visit_expr(&input)
+ visitor.visit_expr(input)
}
for output in &ia.outputs {
visitor.visit_expr(&output.expr)
+++ /dev/null
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-struct Foo;
-
-fn main() {
- let a: Result<(), Foo> = Ok(());
- a.unwrap();
- //~^ ERROR no method named `unwrap` found for type `std::result::Result<(), Foo>`
- //~| NOTE the following trait bounds were not satisfied: `Foo : std::fmt::Debug`
-}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Regression test for #41697. Using dump-mir was triggering
+// artificial cycles: during type-checking, we had to get the MIR for
+// the constant expressions in `[u8; 2]`, which in turn would trigger
+// an attempt to get the item-path, which in turn would request the
+// types of the impl, which would trigger a cycle. We suppressed this
+// cycle now by forcing mir-dump to avoid asking for types of an impl.
+
+#![feature(rustc_attrs)]
+
+use std::sync::Arc;
+
+trait Foo {
+ fn get(&self) -> [u8; 2];
+}
+
+impl Foo for [u8; 2] {
+ fn get(&self) -> [u8; 2] {
+ *self
+ }
+}
+
+struct Bar<T: ?Sized>(T);
+
+fn unsize_fat_ptr<'a>(x: &'a Bar<Foo + Send + 'a>) -> &'a Bar<Foo + 'a> {
+ x
+}
+
+fn unsize_nested_fat_ptr(x: Arc<Foo + Send>) -> Arc<Foo> {
+ x
+}
+
+fn main() {
+ let x: Box<Bar<Foo + Send>> = Box::new(Bar([1,2]));
+ assert_eq!(unsize_fat_ptr(&*x).0.get(), [1, 2]);
+
+ let x: Arc<Foo + Send> = Arc::new([3, 4]);
+ assert_eq!(unsize_nested_fat_ptr(x).get(), [3, 4]);
+}
+++ /dev/null
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// compile-flags:-Zdump-mir=NEVER_MATCHED
-
-// Regression test for #41697. Using dump-mir was triggering
-// artificial cycles: during type-checking, we had to get the MIR for
-// the constant expressions in `[u8; 2]`, which in turn would trigger
-// an attempt to get the item-path, which in turn would request the
-// types of the impl, which would trigger a cycle. We supressed this
-// cycle now by forcing mir-dump to avoid asking for types of an impl.
-
-#![feature(rustc_attrs)]
-
-use std::sync::Arc;
-
-trait Foo {
- fn get(&self) -> [u8; 2];
-}
-
-impl Foo for [u8; 2] {
- fn get(&self) -> [u8; 2] {
- *self
- }
-}
-
-struct Bar<T: ?Sized>(T);
-
-fn unsize_fat_ptr<'a>(x: &'a Bar<Foo + Send + 'a>) -> &'a Bar<Foo + 'a> {
- x
-}
-
-fn unsize_nested_fat_ptr(x: Arc<Foo + Send>) -> Arc<Foo> {
- x
-}
-
-fn main() {
- let x: Box<Bar<Foo + Send>> = Box::new(Bar([1,2]));
- assert_eq!(unsize_fat_ptr(&*x).0.get(), [1, 2]);
-
- let x: Arc<Foo + Send> = Arc::new([3, 4]);
- assert_eq!(unsize_nested_fat_ptr(x).get(), [3, 4]);
-}
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+/// A compile-time map from identifiers to arbitrary (heterogeneous) expressions
+macro_rules! ident_map {
+ ( $name:ident = { $($key:ident => $e:expr,)* } ) => {
+ macro_rules! $name {
+ $(
+ ( $key ) => { $e };
+ )*
+ // Empty invocation expands to nothing. Needed when the map is empty.
+ () => {};
+ }
+ };
+}
+
+ident_map!(my_map = {
+ main => 0,
+});
+
+fn main() {
+ my_map!(main);
+}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Regression test for #41936. The coerce-unsized trait check in
+// coherence was using subtyping, which triggered variance
+// computation, which failed because it required type info for fields
+// that had not (yet) been computed.
+
+#![feature(unsize)]
+#![feature(coerce_unsized)]
+
+use std::{marker,ops};
+
+// Change the array to a non-array, and error disappears
+// Adding a new field to the end keeps the error
+struct LogDataBuf([u8;8]);
+
+struct Aref<T: ?Sized>
+{
+ // Inner structure triggers the error, removing the inner removes the message.
+ ptr: Box<ArefInner<T>>,
+}
+impl<T: ?Sized + marker::Unsize<U>, U: ?Sized> ops::CoerceUnsized<Aref<U>> for Aref<T> {}
+
+struct ArefInner<T: ?Sized>
+{
+ // Even with this field commented out, the error is raised.
+ data: T,
+}
+
+fn main(){}
pub fn rust0() {}
// @has - '//code' 'fn rust1()'
pub extern "Rust" fn rust1() {}
- // @has - '//code' 'extern fn c0()'
+ // @has - '//code' 'extern "C" fn c0()'
pub extern fn c0() {}
- // @has - '//code' 'extern fn c1()'
+ // @has - '//code' 'extern "C" fn c1()'
pub extern "C" fn c1() {}
// @has - '//code' 'extern "system" fn system0()'
pub extern "system" fn system0() {}
// @has - '//code' 'impl Bar for fn()'
impl Bar for fn() {}
-// @has - '//code' 'impl Bar for extern fn()'
+// @has - '//code' 'impl Bar for extern "C" fn()'
impl Bar for extern fn() {}
// @has - '//code' 'impl Bar for extern "system" fn()'
impl Bar for extern "system" fn() {}
extern crate rustdoc_ffi as lib;
-// @has ffi/fn.foreigner.html //pre 'pub unsafe extern fn foreigner(cold_as_ice: u32)'
+// @has ffi/fn.foreigner.html //pre 'pub unsafe extern "C" fn foreigner(cold_as_ice: u32)'
pub use lib::foreigner;
extern "C" {
- // @has ffi/fn.another.html //pre 'pub unsafe extern fn another(cold_as_ice: u32)'
+ // @has ffi/fn.another.html //pre 'pub unsafe extern "C" fn another(cold_as_ice: u32)'
pub fn another(cold_as_ice: u32);
}
extern {
// @has issue_22038/fn.foo1.html \
- // '//*[@class="rust fn"]' 'pub unsafe extern fn foo1()'
+ // '//*[@class="rust fn"]' 'pub unsafe extern "C" fn foo1()'
pub fn foo1();
}
}
// @has issue_22038/fn.bar.html \
-// '//*[@class="rust fn"]' 'pub extern fn bar()'
+// '//*[@class="rust fn"]' 'pub extern "C" fn bar()'
pub extern fn bar() {}
// @has issue_22038/fn.baz.html \
// except according to those terms.
extern "C" {
- // @has variadic/fn.foo.html //pre 'pub unsafe extern fn foo(x: i32, ...)'
+ // @has variadic/fn.foo.html //pre 'pub unsafe extern "C" fn foo(x: i32, ...)'
pub fn foo(x: i32, ...);
}
17 | once::<&str>("str").fuse().filter(|a: &str| true).count();
| ^^^^^
|
- = note: the method `count` exists but the following trait bounds were not satisfied: `[closure@$DIR/issue-36053-2.rs:17:39: 17:53] : std::ops::FnMut<(&_,)>`, `std::iter::Filter<std::iter::Fuse<std::iter::Once<&str>>, [closure@$DIR/issue-36053-2.rs:17:39: 17:53]> : std::iter::Iterator`
+ = note: the method `count` exists but the following trait bounds were not satisfied:
+ `[closure@$DIR/issue-36053-2.rs:17:39: 17:53] : std::ops::FnMut<(&_,)>`
+ `std::iter::Filter<std::iter::Fuse<std::iter::Once<&str>>, [closure@$DIR/issue-36053-2.rs:17:39: 17:53]> : std::iter::Iterator`
error[E0281]: type mismatch: `[closure@$DIR/issue-36053-2.rs:17:39: 17:53]` implements the trait `for<'r> std::ops::FnMut<(&'r str,)>`, but the trait `for<'r> std::ops::FnMut<(&'r &str,)>` is required
--> $DIR/issue-36053-2.rs:17:32
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+struct Foo;
+
+fn main() {
+ let a: Result<(), Foo> = Ok(());
+ a.unwrap();
+ //~^ ERROR no method named `unwrap` found for type `std::result::Result<(), Foo>`
+ //~| NOTE the method `unwrap` exists but the following trait bounds were not satisfied
+}
--- /dev/null
+error: no method named `unwrap` found for type `std::result::Result<(), Foo>` in the current scope
+ --> $DIR/method-help-unsatisfied-bound.rs:15:7
+ |
+15 | a.unwrap();
+ | ^^^^^^
+ |
+ = note: the method `unwrap` exists but the following trait bounds were not satisfied:
+ `Foo : std::fmt::Debug`
+
+error: aborting due to previous error
+
-Subproject commit 13d92c64d0153d95dbabeb49b828bbbef4b1bb34
+Subproject commit b007d82be9aa82ae69d382b392d91c151df16da2