Rollup merge of #42046 - steveklabnik:update-books, r=frewsxcv
author     Mark Simulacrum <mark.simulacrum@gmail.com>
           Tue, 16 May 2017 23:31:52 +0000 (17:31 -0600)
committer  GitHub <noreply@github.com>
           Tue, 16 May 2017 23:31:52 +0000 (17:31 -0600)
Update the various books to latest

This includes a draft of chapter 20 of the book!

60 files changed:
CONTRIBUTING.md
configure
src/Cargo.lock
src/bootstrap/config.rs
src/bootstrap/config.toml.example
src/bootstrap/install.rs
src/librustc/infer/combine.rs
src/librustc/ty/maps.rs
src/librustc_mir/util/pretty.rs
src/librustc_typeck/check/method/suggest.rs
src/librustc_typeck/coherence/builtin.rs
src/librustdoc/html/format.rs
src/librustdoc/html/static/rustdoc.css
src/libstd/thread/local.rs
src/libstd/thread/mod.rs
src/libsyntax/ast.rs
src/libsyntax/attr.rs
src/libsyntax/codemap.rs
src/libsyntax/config.rs
src/libsyntax/diagnostics/plugin.rs
src/libsyntax/ext/base.rs
src/libsyntax/ext/expand.rs
src/libsyntax/ext/quote.rs
src/libsyntax/ext/source_util.rs
src/libsyntax/ext/tt/macro_parser.rs
src/libsyntax/ext/tt/macro_rules.rs
src/libsyntax/ext/tt/quoted.rs
src/libsyntax/ext/tt/transcribe.rs
src/libsyntax/feature_gate.rs
src/libsyntax/json.rs
src/libsyntax/parse/attr.rs
src/libsyntax/parse/classify.rs
src/libsyntax/parse/common.rs
src/libsyntax/parse/lexer/comments.rs
src/libsyntax/parse/lexer/mod.rs
src/libsyntax/parse/mod.rs
src/libsyntax/parse/obsolete.rs
src/libsyntax/parse/parser.rs
src/libsyntax/parse/token.rs
src/libsyntax/print/pp.rs
src/libsyntax/print/pprust.rs
src/libsyntax/std_inject.rs
src/libsyntax/test.rs
src/libsyntax/tokenstream.rs
src/libsyntax/util/lev_distance.rs
src/libsyntax/util/move_map.rs
src/libsyntax/visit.rs
src/test/compile-fail/method-help-unsatisfied-bound.rs [deleted file]
src/test/mir-opt/issue-41697.rs [new file with mode: 0644]
src/test/run-pass/issue-41697.rs [deleted file]
src/test/run-pass/issue-41803.rs [new file with mode: 0644]
src/test/run-pass/issue-41936-variance-coerce-unsized-cycle.rs [new file with mode: 0644]
src/test/rustdoc/extern-impl.rs
src/test/rustdoc/ffi.rs
src/test/rustdoc/issue-22038.rs
src/test/rustdoc/variadic.rs
src/test/ui/mismatched_types/issue-36053-2.stderr
src/test/ui/mismatched_types/method-help-unsatisfied-bound.rs [new file with mode: 0644]
src/test/ui/mismatched_types/method-help-unsatisfied-bound.stderr [new file with mode: 0644]
src/tools/cargo

index 0314a5dfd8d02a5b283d317ef83cadad445fd8ce..8f121f8d6ed9b54434c5f1bb4e23a149a42381d6 100644 (file)
@@ -177,7 +177,7 @@ python x.py test src/test/rustdoc
 python x.py build src/libcore --stage 0
 ```
 
-You can explore the build system throught the various `--help` pages for each
+You can explore the build system through the various `--help` pages for each
 subcommand. For example to learn more about a command you can run:
 
 ```
index db41f0dfb94f388be9b8c405fa34def58bf29b62..af59d5b0bb88977d2c512af7fa1f1c096ed398ea 100755 (executable)
--- a/configure
+++ b/configure
@@ -519,6 +519,7 @@ valopt_nosave host "${CFG_BUILD}" "GNUs ./configure syntax LLVM host triples"
 valopt_nosave target "${CFG_HOST}" "GNUs ./configure syntax LLVM target triples"
 valopt_nosave mandir "${CFG_PREFIX}/share/man" "install man pages in PATH"
 valopt_nosave docdir "${CFG_PREFIX}/share/doc/rust" "install documentation in PATH"
+valopt_nosave bindir "${CFG_PREFIX}/bin" "install binaries"
 
 # On Windows this determines root of the subtree for target libraries.
 # Host runtime libs always go to 'bin'.
@@ -710,6 +711,7 @@ envopt LDFLAGS
 CFG_PREFIX=${CFG_PREFIX%/}
 CFG_MANDIR=${CFG_MANDIR%/}
 CFG_DOCDIR=${CFG_DOCDIR%/}
+CFG_BINDIR=${CFG_BINDIR%/}
 CFG_HOST="$(echo $CFG_HOST | tr ',' ' ')"
 CFG_TARGET="$(echo $CFG_TARGET | tr ',' ' ')"
 
@@ -750,6 +752,7 @@ putvar CFG_X86_64_LINUX_ANDROID_NDK
 putvar CFG_NACL_CROSS_PATH
 putvar CFG_MANDIR
 putvar CFG_DOCDIR
+putvar CFG_BINDIR
 putvar CFG_USING_LIBCPP
 
 msg
index 6d7b13f66f7eaf3a9a6440c0f8c09a81e32c5282..c9de8f9900865a7f12e1417f8d3955f21a0599d1 100644 (file)
@@ -345,7 +345,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
 name = "lzma-sys"
-version = "0.1.3"
+version = "0.1.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1139,7 +1139,7 @@ name = "xz2"
 version = "0.1.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "lzma-sys 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "lzma-sys 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
@@ -1173,7 +1173,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 "checksum lazy_static 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "3b37545ab726dd833ec6420aaba8231c5b320814b9029ad585555d2a03e94fbf"
 "checksum libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)" = "88ee81885f9f04bff991e306fea7c1c60a5f0f9e409e99f6b40e3311a3363135"
 "checksum log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)" = "5141eca02775a762cc6cd564d8d2c50f67c0ea3a372cbf1c51592b3e029e10ad"
-"checksum lzma-sys 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "c5eaaa53b35fa17482ee2c001b04242827b47ae0faba72663fee3dee32366248"
+"checksum lzma-sys 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "fedff6a5cbb24494ec6ee4784e9ac5c187161fede04c7767d49bf87544013afa"
 "checksum mdbook 0.0.21 (registry+https://github.com/rust-lang/crates.io-index)" = "f1e2e9d848514dcfad4195788d0d42ae5153a477c191d75d5b84fab10f222fbd"
 "checksum memchr 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1dbccc0e46f1ea47b9f17e6d67c5a96bd27030519c519c9c91327e31275a47b4"
 "checksum miniz-sys 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "28eaee17666671fa872e567547e8428e83308ebe5808cdf6a0e28397dbe2c726"
index 9c536111811aab1d41a261c3130e4dc862ddad9c..0fb597564e33de15c2bc6defc8fd35273dfb43df 100644 (file)
@@ -99,7 +99,9 @@ pub struct Config {
     // Fallback musl-root for all targets
     pub musl_root: Option<PathBuf>,
     pub prefix: Option<PathBuf>,
+    pub sysconfdir: Option<PathBuf>,
     pub docdir: Option<PathBuf>,
+    pub bindir: Option<PathBuf>,
     pub libdir: Option<PathBuf>,
     pub libdir_relative: Option<PathBuf>,
     pub mandir: Option<PathBuf>,
@@ -165,9 +167,11 @@ struct Build {
 #[derive(RustcDecodable, Default, Clone)]
 struct Install {
     prefix: Option<String>,
-    mandir: Option<String>,
+    sysconfdir: Option<String>,
     docdir: Option<String>,
+    bindir: Option<String>,
     libdir: Option<String>,
+    mandir: Option<String>,
 }
 
 /// TOML representation of how the LLVM build is configured.
@@ -315,9 +319,11 @@ pub fn parse(build: &str, file: Option<PathBuf>) -> Config {
 
         if let Some(ref install) = toml.install {
             config.prefix = install.prefix.clone().map(PathBuf::from);
-            config.mandir = install.mandir.clone().map(PathBuf::from);
+            config.sysconfdir = install.sysconfdir.clone().map(PathBuf::from);
             config.docdir = install.docdir.clone().map(PathBuf::from);
+            config.bindir = install.bindir.clone().map(PathBuf::from);
             config.libdir = install.libdir.clone().map(PathBuf::from);
+            config.mandir = install.mandir.clone().map(PathBuf::from);
         }
 
         if let Some(ref llvm) = toml.llvm {
@@ -523,9 +529,15 @@ macro_rules! check {
                 "CFG_PREFIX" => {
                     self.prefix = Some(PathBuf::from(value));
                 }
+                "CFG_SYSCONFDIR" => {
+                    self.sysconfdir = Some(PathBuf::from(value));
+                }
                 "CFG_DOCDIR" => {
                     self.docdir = Some(PathBuf::from(value));
                 }
+                "CFG_BINDIR" => {
+                    self.bindir = Some(PathBuf::from(value));
+                }
                 "CFG_LIBDIR" => {
                     self.libdir = Some(PathBuf::from(value));
                 }
index 25da976a555e900653a45eee70d4751c7353edc8..df180be4e27ab0836b6df75d4b939aeac3c4fe7d 100644 (file)
 # Instead of installing to /usr/local, install to this path instead.
 #prefix = "/usr/local"
 
+# Where to install system configuration files
+# If this is a relative path, it will get installed in `prefix` above
+#sysconfdir = "/etc"
+
+# Where to install documentation in `prefix` above
+#docdir = "share/doc/rust"
+
+# Where to install binaries in `prefix` above
+#bindir = "bin"
+
 # Where to install libraries in `prefix` above
 #libdir = "lib"
 
 # Where to install man pages in `prefix` above
 #mandir = "share/man"
 
-# Where to install documentation in `prefix` above
-#docdir = "share/doc/rust"
-
 # =============================================================================
 # Options for compiling Rust code itself
 # =============================================================================
index c805522fbf588d06b4676bb8bd0647fa2c1aea54..386b001971bad43c978fc91984b9537c119f41ae 100644 (file)
 /// Installs everything.
 pub fn install(build: &Build, stage: u32, host: &str) {
     let prefix_default = PathBuf::from("/usr/local");
+    let sysconfdir_default = PathBuf::from("/etc");
     let docdir_default = PathBuf::from("share/doc/rust");
-    let mandir_default = PathBuf::from("share/man");
+    let bindir_default = PathBuf::from("bin");
     let libdir_default = PathBuf::from("lib");
+    let mandir_default = PathBuf::from("share/man");
     let prefix = build.config.prefix.as_ref().unwrap_or(&prefix_default);
+    let sysconfdir = build.config.sysconfdir.as_ref().unwrap_or(&sysconfdir_default);
     let docdir = build.config.docdir.as_ref().unwrap_or(&docdir_default);
+    let bindir = build.config.bindir.as_ref().unwrap_or(&bindir_default);
     let libdir = build.config.libdir.as_ref().unwrap_or(&libdir_default);
     let mandir = build.config.mandir.as_ref().unwrap_or(&mandir_default);
 
+    let sysconfdir = prefix.join(sysconfdir);
     let docdir = prefix.join(docdir);
+    let bindir = prefix.join(bindir);
     let libdir = prefix.join(libdir);
     let mandir = prefix.join(mandir);
 
     let destdir = env::var_os("DESTDIR").map(PathBuf::from);
 
     let prefix = add_destdir(&prefix, &destdir);
+    let sysconfdir = add_destdir(&sysconfdir, &destdir);
     let docdir = add_destdir(&docdir, &destdir);
+    let bindir = add_destdir(&bindir, &destdir);
     let libdir = add_destdir(&libdir, &destdir);
     let mandir = add_destdir(&mandir, &destdir);
 
@@ -47,29 +55,35 @@ pub fn install(build: &Build, stage: u32, host: &str) {
     t!(fs::create_dir_all(&empty_dir));
     if build.config.docs {
         install_sh(&build, "docs", "rust-docs", &build.rust_package_vers(),
-                   stage, host, &prefix, &docdir, &libdir, &mandir, &empty_dir);
+                   stage, host, &prefix, &sysconfdir, &docdir, &bindir, &libdir,
+                   &mandir, &empty_dir);
     }
 
     for target in build.config.target.iter() {
         install_sh(&build, "std", "rust-std", &build.rust_package_vers(),
-                   stage, target, &prefix, &docdir, &libdir, &mandir, &empty_dir);
+                   stage, target, &prefix, &sysconfdir, &docdir, &bindir, &libdir,
+                   &mandir, &empty_dir);
     }
 
     if build.config.extended {
         install_sh(&build, "cargo", "cargo", &build.cargo_package_vers(),
-                   stage, host, &prefix, &docdir, &libdir, &mandir, &empty_dir);
+                   stage, host, &prefix, &sysconfdir, &docdir, &bindir, &libdir,
+                   &mandir, &empty_dir);
         install_sh(&build, "rls", "rls", &build.rls_package_vers(),
-                   stage, host, &prefix, &docdir, &libdir, &mandir, &empty_dir);
+                   stage, host, &prefix, &sysconfdir, &docdir, &bindir, &libdir,
+                   &mandir, &empty_dir);
     }
 
     install_sh(&build, "rustc", "rustc", &build.rust_package_vers(),
-               stage, host, &prefix, &docdir, &libdir, &mandir, &empty_dir);
+               stage, host, &prefix, &sysconfdir, &docdir, &bindir, &libdir,
+               &mandir, &empty_dir);
 
     t!(fs::remove_dir_all(&empty_dir));
 }
 
 fn install_sh(build: &Build, package: &str, name: &str, version: &str, stage: u32, host: &str,
-              prefix: &Path, docdir: &Path, libdir: &Path, mandir: &Path, empty_dir: &Path) {
+              prefix: &Path, sysconfdir: &Path, docdir: &Path, bindir: &Path, libdir: &Path,
+              mandir: &Path, empty_dir: &Path) {
     println!("Install {} stage{} ({})", package, stage, host);
     let package_name = format!("{}-{}-{}", name, version, host);
 
@@ -77,7 +91,9 @@ fn install_sh(build: &Build, package: &str, name: &str, version: &str, stage: u3
     cmd.current_dir(empty_dir)
        .arg(sanitize_sh(&tmpdir(build).join(&package_name).join("install.sh")))
        .arg(format!("--prefix={}", sanitize_sh(prefix)))
+       .arg(format!("--sysconfdir={}", sanitize_sh(sysconfdir)))
        .arg(format!("--docdir={}", sanitize_sh(docdir)))
+       .arg(format!("--bindir={}", sanitize_sh(bindir)))
        .arg(format!("--libdir={}", sanitize_sh(libdir)))
        .arg(format!("--mandir={}", sanitize_sh(mandir)))
        .arg("--disable-ldconfig");
index 18909a784d6106298aec4d8ad3b7b5e3b5250bd2..aabb6aff55140c4138efecfa741db1245b2e0e4b 100644 (file)
 use super::InferCtxt;
 use super::{MiscVariable, TypeTrace};
 
+use hir::def_id::DefId;
 use ty::{IntType, UintType};
 use ty::{self, Ty, TyCtxt};
 use ty::error::TypeError;
 use ty::relate::{self, Relate, RelateResult, TypeRelation};
+use ty::subst::Substs;
 use traits::{Obligation, PredicateObligations};
 
 use syntax::ast;
@@ -336,6 +338,23 @@ fn binders<T>(&mut self, a: &ty::Binder<T>, b: &ty::Binder<T>)
         Ok(ty::Binder(self.relate(a.skip_binder(), b.skip_binder())?))
     }
 
+    fn relate_item_substs(&mut self,
+                          item_def_id: DefId,
+                          a_subst: &'tcx Substs<'tcx>,
+                          b_subst: &'tcx Substs<'tcx>)
+                          -> RelateResult<'tcx, &'tcx Substs<'tcx>>
+    {
+        if self.ambient_variance == ty::Variance::Invariant {
+            // Avoid fetching the variance if we are in an invariant
+            // context; no need, and it can induce dependency cycles
+            // (e.g. #41849).
+            relate::relate_substs(self, None, a_subst, b_subst)
+        } else {
+            let opt_variances = self.tcx().variances_of(item_def_id);
+            relate::relate_substs(self, Some(&opt_variances), a_subst, b_subst)
+        }
+    }
+
     fn relate_with_variance<T: Relate<'tcx>>(&mut self,
                                              variance: ty::Variance,
                                              a: &T,
index 1fd9e8f73756e830ca1d0f238107770f03464348..f448ca8934732779431da7cc94f9e560203dde3c 100644 (file)
@@ -592,7 +592,7 @@ pub struct Maps<$tcx> {
      output: $output:tt) => {
         define_map_struct! {
             tcx: $tcx,
-            ready: ([pub] $attrs $name),
+            ready: ([] $attrs $name),
             input: ($($input)*),
             output: $output
         }
index 14f277d1767bbc3faf3915ddec606226c46d6327..eaba573dcd2e582d21dfdc056e87ba601d820e94 100644 (file)
@@ -324,7 +324,9 @@ fn write_mir_sig(tcx: TyCtxt, src: MirSource, mir: &Mir, w: &mut Write)
         MirSource::Promoted(_, i) => write!(w, "{:?} in", i)?
     }
 
-    write!(w, " {}", tcx.node_path_str(src.item_id()))?;
+    item_path::with_forced_impl_filename_line(|| { // see notes on #41697 elsewhere
+        write!(w, " {}", tcx.node_path_str(src.item_id()))
+    })?;
 
     if let MirSource::Fn(_) = src {
         write!(w, "(")?;
index c7ec379b0de25d9fc1181a959ebe42a3407ebd8e..7e70bb92cd6e0f011b57c3baf236412490b09641 100644 (file)
@@ -251,9 +251,9 @@ macro_rules! report_function {
                     let bound_list = unsatisfied_predicates.iter()
                         .map(|p| format!("`{} : {}`", p.self_ty(), p))
                         .collect::<Vec<_>>()
-                        .join("");
+                        .join("\n");
                     err.note(&format!("the method `{}` exists but the following trait bounds \
-                                       were not satisfied: {}",
+                                       were not satisfied:\n{}",
                                       item_name,
                                       bound_list));
                 }
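
For illustration, the reworded diagnostic above now lists each unsatisfied bound on its own line. A minimal snippet (deliberately non-compiling, modeled on the new `method-help-unsatisfied-bound` UI test added in this change) that triggers it:

```rust
struct Foo;

fn main() {
    // `Result::unwrap` is only available when the error type implements
    // `Debug`. Since `Foo` does not, rustc emits "the method `unwrap` exists
    // but the following trait bounds were not satisfied", with each bound
    // now printed on its own line.
    let x: Result<u32, Foo> = Ok(92);
    x.unwrap();
}
```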
index d40a68e605690d880e34c4bd24a73dafd7004ff1..556bd618c78cbb22e49b90b503f1daad2577d4a6 100644 (file)
@@ -249,6 +249,45 @@ pub fn coerce_unsized_info<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                     return err_info;
                 }
 
+                // Here we are considering a case of converting
+                // `S<P0...Pn>` to `S<Q0...Qn>`. As an example, let's imagine a struct `Foo<T, U>`,
+                // which acts like a pointer to `U`, but carries along some extra data of type `T`:
+                //
+                //     struct Foo<T, U> {
+                //         extra: T,
+                //         ptr: *mut U,
+                //     }
+                //
+                // We might have an impl that allows (e.g.) `Foo<T, [i32; 3]>` to be unsized
+                // to `Foo<T, [i32]>`. That impl would look like:
+                //
+                //   impl<T, U: Unsize<V>, V> CoerceUnsized<Foo<T, V>> for Foo<T, U> {}
+                //
+                // Here `U = [i32; 3]` and `V = [i32]`. At runtime,
+                // when this coercion occurs, we would be changing the
+                // field `ptr` from a thin pointer of type `*mut [i32;
+                // 3]` to a fat pointer of type `*mut [i32]` (with
+                // extra data `3`).  **The purpose of this check is to
+                // make sure that we know how to do this conversion.**
+                //
+                // To check if this impl is legal, we would walk down
+                // the fields of `Foo` and consider their types with
+                // both substitutes. We are looking to find that
+                // exactly one (non-phantom) field has changed its
+                // type, which we will expect to be the pointer that
+                // is becoming fat (we could probably generalize this
+                // to multiple thin pointers of the same type becoming
+                // fat, but we don't). In this case:
+                //
+                // - `extra` has type `T` before and type `T` after
+                // - `ptr` has type `*mut U` before and type `*mut V` after
+                //
+                // Since just one field changed, we would then check
+                // that `*mut U: CoerceUnsized<*mut V>` is implemented
+                // (in other words, that we know how to do this
+                // conversion). This will work out because `U:
+                // Unsize<V>`, and we have a builtin rule that `*mut
+                // U` can be coerced to `*mut V` if `U: Unsize<V>`.
                 let fields = &def_a.struct_variant().fields;
                 let diff_fields = fields.iter()
                     .enumerate()
@@ -260,8 +299,16 @@ pub fn coerce_unsized_info<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                             return None;
                         }
 
-                        // Ignore fields that aren't significantly changed
-                        if let Ok(ok) = infcx.sub_types(false, &cause, b, a) {
+                        // Ignore fields that aren't changed; it may
+                        // be that we could get away with subtyping or
+                        // something more accepting, but we use
+                        // equality because we want to be able to
+                        // perform this check without computing
+                        // variance where possible. (This is because
+                        // we may have to evaluate constraint
+                        // expressions in the course of execution.)
+                        // See e.g. #41936.
+                        if let Ok(ok) = infcx.eq_types(false, &cause, b, a) {
                             if ok.obligations.is_empty() {
                                 return None;
                             }
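
As a hedged, standalone sketch of the kind of impl the comment above is checking (nightly-only; assumes the `coerce_unsized` and `unsize` feature gates):

```rust
#![feature(coerce_unsized, unsize)]

use std::marker::Unsize;
use std::ops::CoerceUnsized;

// A pointer-like struct that carries extra data of type `T` alongside a raw
// pointer to `U`, as in the `Foo<T, U>` example above.
struct Foo<T, U: ?Sized> {
    extra: T,
    ptr: *mut U,
}

// Exactly one (non-phantom) field changes type, `ptr: *mut U` -> `ptr: *mut V`,
// and `*mut U: CoerceUnsized<*mut V>` holds because `U: Unsize<V>`, so the
// check described above accepts this impl.
impl<T, U: Unsize<V> + ?Sized, V: ?Sized> CoerceUnsized<Foo<T, V>> for Foo<T, U> {}

fn main() {
    let mut data = [1, 2, 3];
    let thin: Foo<u8, [i32; 3]> = Foo { extra: 0, ptr: &mut data };
    // The `ptr` field goes from a thin `*mut [i32; 3]` to a fat `*mut [i32]`.
    let _fat: Foo<u8, [i32]> = thin;
}
```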
index 5db82e23bbf1eb0244d6500e8c0533f21e6f0105..612793e2567f1c35dbf5bd47684578559e0d8fde 100644 (file)
@@ -1177,7 +1177,6 @@ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         let quot = if f.alternate() { "\"" } else { "&quot;" };
         match self.0 {
             Abi::Rust => Ok(()),
-            Abi::C => write!(f, "extern "),
             abi => write!(f, "extern {0}{1}{0} ", quot, abi.name()),
         }
     }
index 60ea6ed1d8d58753f6f5643b1590bde5f4c4372f..570a1980782131131ff3c05dbe52e6ad0fa621a1 100644 (file)
@@ -617,6 +617,11 @@ a.test-arrow:hover{
        top: 0;
 }
 
+h3 > .collapse-toggle, h4 > .collapse-toggle {
+       font-size: 0.8em;
+       top: 5px;
+}
+
 .toggle-wrapper > .collapse-toggle {
        left: -24px;
        margin-top: 0px;
index e2b22b1d89f045496e267eed902d2b8e3adf2fe4..c2c6e6cf87dff4ad71fcde7e8f721f342ef3d37c 100644 (file)
 /// A thread local storage key which owns its contents.
 ///
 /// This key uses the fastest possible implementation available to it for the
-/// target platform. It is instantiated with the `thread_local!` macro and the
-/// primary method is the `with` method.
+/// target platform. It is instantiated with the [`thread_local!`] macro and the
+/// primary method is the [`with`] method.
 ///
-/// The `with` method yields a reference to the contained value which cannot be
+/// The [`with`] method yields a reference to the contained value which cannot be
 /// sent across threads or escape the given closure.
 ///
 /// # Initialization and Destruction
 ///
-/// Initialization is dynamically performed on the first call to `with()`
-/// within a thread, and values that implement `Drop` get destructed when a
+/// Initialization is dynamically performed on the first call to [`with`]
+/// within a thread, and values that implement [`Drop`] get destructed when a
 /// thread exits. Some caveats apply, which are explained below.
 ///
 /// # Examples
 /// 3. On macOS, initializing TLS during destruction of other TLS slots can
 ///    sometimes cancel *all* destructors for the current thread, whether or not
 ///    the slots have already had their destructors run or not.
+///
+/// [`with`]: ../../std/thread/struct.LocalKey.html#method.with
+/// [`thread_local!`]: ../../std/macro.thread_local.html
+/// [`Drop`]: ../../std/ops/trait.Drop.html
 #[stable(feature = "rust1", since = "1.0.0")]
 pub struct LocalKey<T: 'static> {
     // This outer `LocalKey<T>` type is what's going to be stored in statics,
@@ -106,7 +110,7 @@ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
     }
 }
 
-/// Declare a new thread local storage key of type `std::thread::LocalKey`.
+/// Declare a new thread local storage key of type [`std::thread::LocalKey`].
 ///
 /// # Syntax
 ///
@@ -124,8 +128,10 @@ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
 /// # fn main() {}
 /// ```
 ///
-/// See [LocalKey documentation](thread/struct.LocalKey.html) for more
+/// See [LocalKey documentation][`std::thread::LocalKey`] for more
 /// information.
+///
+/// [`std::thread::LocalKey`]: ../std/thread/struct.LocalKey.html
 #[macro_export]
 #[stable(feature = "rust1", since = "1.0.0")]
 #[allow_internal_unstable]
@@ -195,11 +201,13 @@ fn __getit() -> $crate::option::Option<
 #[derive(Debug, Eq, PartialEq, Copy, Clone)]
 pub enum LocalKeyState {
     /// All keys are in this state whenever a thread starts. Keys will
-    /// transition to the `Valid` state once the first call to `with` happens
+    /// transition to the `Valid` state once the first call to [`with`] happens
     /// and the initialization expression succeeds.
     ///
     /// Keys in the `Uninitialized` state will yield a reference to the closure
-    /// passed to `with` so long as the initialization routine does not panic.
+    /// passed to [`with`] so long as the initialization routine does not panic.
+    ///
+    /// [`with`]: ../../std/thread/struct.LocalKey.html#method.with
     Uninitialized,
 
     /// Once a key has been accessed successfully, it will enter the `Valid`
@@ -208,7 +216,9 @@ pub enum LocalKeyState {
     /// `Destroyed` state.
     ///
     /// Keys in the `Valid` state will be guaranteed to yield a reference to the
-    /// closure passed to `with`.
+    /// closure passed to [`with`].
+    ///
+    /// [`with`]: ../../std/thread/struct.LocalKey.html#method.with
     Valid,
 
     /// When a thread exits, the destructors for keys will be run (if
@@ -216,7 +226,9 @@ pub enum LocalKeyState {
     /// destructor has run, a key is in the `Destroyed` state.
     ///
     /// Keys in the `Destroyed` states will trigger a panic when accessed via
-    /// `with`.
+    /// [`with`].
+    ///
+    /// [`with`]: ../../std/thread/struct.LocalKey.html#method.with
     Destroyed,
 }
 
@@ -283,23 +295,26 @@ unsafe fn init(&self, slot: &UnsafeCell<Option<T>>) -> &T {
     /// Query the current state of this key.
     ///
     /// A key is initially in the `Uninitialized` state whenever a thread
-    /// starts. It will remain in this state up until the first call to `with`
+    /// starts. It will remain in this state up until the first call to [`with`]
     /// within a thread has run the initialization expression successfully.
     ///
     /// Once the initialization expression succeeds, the key transitions to the
-    /// `Valid` state which will guarantee that future calls to `with` will
+    /// `Valid` state which will guarantee that future calls to [`with`] will
     /// succeed within the thread.
     ///
     /// When a thread exits, each key will be destroyed in turn, and as keys are
     /// destroyed they will enter the `Destroyed` state just before the
     /// destructor starts to run. Keys may remain in the `Destroyed` state after
     /// destruction has completed. Keys without destructors (e.g. with types
-    /// that are `Copy`), may never enter the `Destroyed` state.
+    /// that are [`Copy`]), may never enter the `Destroyed` state.
     ///
     /// Keys in the `Uninitialized` state can be accessed so long as the
     /// initialization does not panic. Keys in the `Valid` state are guaranteed
     /// to be able to be accessed. Keys in the `Destroyed` state will panic on
-    /// any call to `with`.
+    /// any call to [`with`].
+    ///
+    /// [`with`]: ../../std/thread/struct.LocalKey.html#method.with
+    /// [`Copy`]: ../../std/marker/trait.Copy.html
     #[unstable(feature = "thread_local_state",
                reason = "state querying was recently added",
                issue = "27716")]
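
As a quick illustration of the behaviour the new links document (lazy, per-thread initialization on the first `with` call), a short sketch:

```rust
use std::cell::Cell;
use std::thread;

thread_local! {
    // Initialized lazily, on the first call to `with` in each thread.
    static COUNTER: Cell<u32> = Cell::new(0);
}

fn main() {
    COUNTER.with(|c| c.set(c.get() + 1));
    COUNTER.with(|c| assert_eq!(c.get(), 1));

    thread::spawn(|| {
        // A newly spawned thread gets its own, freshly initialized value.
        COUNTER.with(|c| assert_eq!(c.get(), 0));
    })
    .join()
    .unwrap();
}
```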
index 200368be275c335e81b03e6b986fb6a8ded6c800..154406a1d8bd78ca29965e9f54f49f4af51fc896 100644 (file)
 // Builder
 ////////////////////////////////////////////////////////////////////////////////
 
-/// Thread configuration. Provides detailed control over the properties
-/// and behavior of new threads.
+/// Thread factory, which can be used in order to configure the properties of
+/// a new thread.
+///
+/// Methods can be chained on it in order to configure it.
+///
+/// The two configurations available are:
+///
+/// - [`name`]: allows giving a name to the thread, which is currently
+///   only used in `panic` messages.
+/// - [`stack_size`]: specifies the desired stack size. Note that this can
+///   be overridden by the OS.
+///
+/// If the [`stack_size`] field is not specified, the stack size
+/// will be taken from the `RUST_MIN_STACK` environment variable. If it is
+/// not specified either, a sensible default will be set.
+///
+/// If the [`name`] field is not specified, the thread will not be named.
+///
+/// The [`spawn`] method will take ownership of the builder and return an
+/// [`io::Result`] containing the thread handle with the given configuration.
+///
+/// The [`thread::spawn`] free function uses a `Builder` with default
+/// configuration and [`unwrap`]s its return value.
+///
+/// You may want to use [`spawn`] instead of [`thread::spawn`], when you want
+/// to recover from a failure to launch a thread; indeed, the free function will
+/// panic where the `Builder` method will return an [`io::Result`].
 ///
 /// # Examples
 ///
 ///
 /// handler.join().unwrap();
 /// ```
+///
+/// [`thread::spawn`]: ../../std/thread/fn.spawn.html
+/// [`stack_size`]: ../../std/thread/struct.Builder.html#method.stack_size
+/// [`name`]: ../../std/thread/struct.Builder.html#method.name
+/// [`spawn`]: ../../std/thread/struct.Builder.html#method.spawn
+/// [`io::Result`]: ../../std/io/type.Result.html
+/// [`unwrap`]: ../../std/result/enum.Result.html#method.unwrap
 #[stable(feature = "rust1", since = "1.0.0")]
 #[derive(Debug)]
 pub struct Builder {
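
A short usage sketch of the workflow the new `Builder` docs describe: chain `name` and `stack_size`, then call `spawn`, which returns an `io::Result` instead of panicking on failure:

```rust
use std::thread;

fn main() {
    let builder = thread::Builder::new()
        .name("worker".into())
        .stack_size(32 * 1024);

    // Unlike the `thread::spawn` free function, `Builder::spawn` returns an
    // `io::Result`, so a failure to launch the thread can be handled.
    let handle = builder
        .spawn(|| {
            println!("hello from {:?}", thread::current().name());
        })
        .expect("failed to spawn thread");

    handle.join().unwrap();
}
```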
@@ -209,11 +241,6 @@ impl Builder {
     /// Generates the base configuration for spawning a thread, from which
     /// configuration methods can be chained.
     ///
-    /// If the [`stack_size`] field is not specified, the stack size
-    /// will be the `RUST_MIN_STACK` environment variable.  If it is
-    /// not specified either, a sensible default will be set (2MB as
-    /// of the writting of this doc).
-    ///
     /// # Examples
     ///
     /// ```
@@ -229,8 +256,6 @@ impl Builder {
     ///
     /// handler.join().unwrap();
     /// ```
-    ///
-    /// [`stack_size`]: ../../std/thread/struct.Builder.html#method.stack_size
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn new() -> Builder {
         Builder {
@@ -280,9 +305,10 @@ pub fn stack_size(mut self, size: usize) -> Builder {
         self
     }
 
-    /// Spawns a new thread, and returns a join handle for it.
+    /// Spawns a new thread by taking ownership of the `Builder`, and returns an
+    /// [`io::Result`] containing its [`JoinHandle`].
     ///
-    /// The child thread may outlive the parent (unless the parent thread
+    /// The spawned thread may outlive the caller (unless the caller thread
     /// is the main thread; the whole process is terminated when the main
     /// thread finishes). The join handle can be used to block on
     /// termination of the child thread, including recovering its panics.
@@ -297,6 +323,7 @@ pub fn stack_size(mut self, size: usize) -> Builder {
     ///
     /// [`spawn`]: ../../std/thread/fn.spawn.html
     /// [`io::Result`]: ../../std/io/type.Result.html
+    /// [`JoinHandle`]: ../../std/thread/struct.JoinHandle.html
     ///
     /// # Examples
     ///
@@ -468,6 +495,23 @@ pub fn current() -> Thread {
 
 /// Cooperatively gives up a timeslice to the OS scheduler.
 ///
+/// This is used when the programmer knows that the thread will have nothing
+/// to do for some time, in order to avoid wasting computing time.
+///
+/// For example when polling on a resource, it is common to check that it is
+/// available, and if not to yield in order to avoid busy waiting.
+///
+/// Thus the pattern of `yield`ing after a failed poll is rather common when
+/// implementing low-level shared resources or synchronization primitives.
+///
+/// However, programmers will usually prefer to use [`channel`]s, [`Condvar`]s,
+/// [`Mutex`]es or [`join`] for their synchronization routines, as they avoid
+/// thinking about thread scheduling.
+///
+/// Note that [`channel`]s, for example, are implemented using this primitive.
+/// Indeed, when you call `send` or `recv`, which are blocking, they will yield
+/// if the channel is not available.
+///
 /// # Examples
 ///
 /// ```
@@ -475,6 +519,12 @@ pub fn current() -> Thread {
 ///
 /// thread::yield_now();
 /// ```
+///
+/// [`channel`]: ../../std/sync/mpsc/index.html
+/// [`spawn`]: ../../std/thread/fn.spawn.html
+/// [`join`]: ../../std/thread/struct.JoinHandle.html#method.join
+/// [`Mutex`]: ../../std/sync/struct.Mutex.html
+/// [`Condvar`]: ../../std/sync/struct.Condvar.html
 #[stable(feature = "rust1", since = "1.0.0")]
 pub fn yield_now() {
     imp::Thread::yield_now()
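
A minimal sketch of the "yield after a failed poll" pattern described above (using an atomic flag purely for illustration; as the new docs note, a `Condvar` or channel is usually preferable):

```rust
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
use std::thread;

fn main() {
    let ready = Arc::new(AtomicBool::new(false));
    let flag = Arc::clone(&ready);

    let worker = thread::spawn(move || {
        // ... do some setup work, then signal readiness ...
        flag.store(true, Ordering::Release);
    });

    // Poll the flag, giving the timeslice back to the scheduler after each
    // failed check instead of spinning at full speed.
    while !ready.load(Ordering::Acquire) {
        thread::yield_now();
    }

    worker.join().unwrap();
}
```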
index e5bb02fe08230253ddfa15006aa864a9879c0a9d..24ce99208ed11bc9e89cda6408003e12fa3207d2 100644 (file)
@@ -715,7 +715,7 @@ pub fn add_trailing_semicolon(mut self) -> Self {
             StmtKind::Mac(mac) => StmtKind::Mac(mac.map(|(mac, _style, attrs)| {
                 (mac, MacStmtStyle::Semicolon, attrs)
             })),
-            node @ _ => node,
+            node => node,
         };
         self
     }
@@ -1076,16 +1076,16 @@ pub fn is_str(&self) -> bool {
     pub fn is_unsuffixed(&self) -> bool {
         match *self {
             // unsuffixed variants
-            LitKind::Str(..) => true,
-            LitKind::ByteStr(..) => true,
-            LitKind::Byte(..) => true,
-            LitKind::Char(..) => true,
-            LitKind::Int(_, LitIntType::Unsuffixed) => true,
-            LitKind::FloatUnsuffixed(..) => true,
+            LitKind::Str(..) |
+            LitKind::ByteStr(..) |
+            LitKind::Byte(..) |
+            LitKind::Char(..) |
+            LitKind::Int(_, LitIntType::Unsuffixed) |
+            LitKind::FloatUnsuffixed(..) |
             LitKind::Bool(..) => true,
             // suffixed variants
-            LitKind::Int(_, LitIntType::Signed(..)) => false,
-            LitKind::Int(_, LitIntType::Unsigned(..)) => false,
+            LitKind::Int(_, LitIntType::Signed(..)) |
+            LitKind::Int(_, LitIntType::Unsigned(..)) |
             LitKind::Float(..) => false,
         }
     }
index 0980b73e80c5c5d8125973de01fb8586db045b2d..45f891d8dc56db911f325fd6ff90b4034066e834 100644 (file)
@@ -112,7 +112,7 @@ impl NestedMetaItem {
     /// Returns the MetaItem if self is a NestedMetaItemKind::MetaItem.
     pub fn meta_item(&self) -> Option<&MetaItem> {
         match self.node {
-            NestedMetaItemKind::MetaItem(ref item) => Some(&item),
+            NestedMetaItemKind::MetaItem(ref item) => Some(item),
             _ => None
         }
     }
@@ -120,7 +120,7 @@ pub fn meta_item(&self) -> Option<&MetaItem> {
     /// Returns the Lit if self is a NestedMetaItemKind::Literal.
     pub fn literal(&self) -> Option<&Lit> {
         match self.node {
-            NestedMetaItemKind::Literal(ref lit) => Some(&lit),
+            NestedMetaItemKind::Literal(ref lit) => Some(lit),
             _ => None
         }
     }
@@ -259,7 +259,7 @@ pub fn value_str(&self) -> Option<Symbol> {
         match self.node {
             MetaItemKind::NameValue(ref v) => {
                 match v.node {
-                    LitKind::Str(ref s, _) => Some((*s).clone()),
+                    LitKind::Str(ref s, _) => Some(*s),
                     _ => None,
                 }
             },
@@ -1217,9 +1217,10 @@ fn token(&self) -> Token {
                 Token::Literal(token::Lit::Float(symbol), Some(Symbol::intern(ty.ty_to_string())))
             }
             LitKind::FloatUnsuffixed(symbol) => Token::Literal(token::Lit::Float(symbol), None),
-            LitKind::Bool(value) => Token::Ident(Ident::with_empty_ctxt(Symbol::intern(match value {
-                true => "true",
-                false => "false",
+            LitKind::Bool(value) => Token::Ident(Ident::with_empty_ctxt(Symbol::intern(if value {
+                "true"
+            } else {
+                "false"
             }))),
         }
     }
@@ -1261,7 +1262,7 @@ fn map_attrs<F: FnOnce(Vec<ast::Attribute>) -> Vec<ast::Attribute>>(self, f: F)
 
 impl HasAttrs for Vec<Attribute> {
     fn attrs(&self) -> &[Attribute] {
-        &self
+        self
     }
     fn map_attrs<F: FnOnce(Vec<Attribute>) -> Vec<Attribute>>(self, f: F) -> Self {
         f(self)
@@ -1270,7 +1271,7 @@ fn map_attrs<F: FnOnce(Vec<Attribute>) -> Vec<Attribute>>(self, f: F) -> Self {
 
 impl HasAttrs for ThinVec<Attribute> {
     fn attrs(&self) -> &[Attribute] {
-        &self
+        self
     }
     fn map_attrs<F: FnOnce(Vec<Attribute>) -> Vec<Attribute>>(self, f: F) -> Self {
         f(self.into()).into()
index 0c8be1d4f24591822ab0686d3851a912b5c8b457..d32c3ec5f46b1f063bbcfa26ebea609d7b158178 100644 (file)
@@ -485,7 +485,7 @@ pub fn span_until_char(&self, sp: Span, c: char) -> Span {
         match self.span_to_snippet(sp) {
             Ok(snippet) => {
                 let snippet = snippet.split(c).nth(0).unwrap_or("").trim_right();
-                if snippet.len() > 0 && !snippet.contains('\n') {
+                if !snippet.is_empty() && !snippet.contains('\n') {
                     Span { hi: BytePos(sp.lo.0 + snippet.len() as u32), ..sp }
                 } else {
                     sp
@@ -502,7 +502,7 @@ pub fn def_span(&self, sp: Span) -> Span {
     pub fn get_filemap(&self, filename: &str) -> Option<Rc<FileMap>> {
         for fm in self.files.borrow().iter() {
             if filename == fm.name {
-               (self.dep_tracking_callback.borrow())(&fm);
+               (self.dep_tracking_callback.borrow())(fm);
                 return Some(fm.clone());
             }
         }
index ede8a33df6546db821b3681f93758aa362e43a29..2e98c7d962606cf524f7cbd718f008f507591b4e 100644 (file)
@@ -123,7 +123,7 @@ pub fn in_cfg(&mut self, attrs: &[ast::Attribute]) -> bool {
                 return false;
             }
 
-            let mis = if !is_cfg(&attr) {
+            let mis = if !is_cfg(attr) {
                 return true;
             } else if let Some(mis) = attr.meta_item_list() {
                 mis
@@ -150,7 +150,7 @@ fn visit_expr_attrs(&mut self, attrs: &[ast::Attribute]) {
         // flag the offending attributes
         for attr in attrs.iter() {
             if !self.features.map(|features| features.stmt_expr_attributes).unwrap_or(true) {
-                let mut err = feature_err(&self.sess,
+                let mut err = feature_err(self.sess,
                                           "stmt_expr_attributes",
                                           attr.span,
                                           GateIssue::Language,
@@ -258,7 +258,7 @@ pub fn configure_stmt(&mut self, stmt: ast::Stmt) -> Option<ast::Stmt> {
     pub fn configure_struct_expr_field(&mut self, field: ast::Field) -> Option<ast::Field> {
         if !self.features.map(|features| features.struct_field_attributes).unwrap_or(true) {
             if !field.attrs.is_empty() {
-                let mut err = feature_err(&self.sess,
+                let mut err = feature_err(self.sess,
                                           "struct_field_attributes",
                                           field.span,
                                           GateIssue::Language,
@@ -290,7 +290,7 @@ fn visit_struct_field_attrs(&mut self, attrs: &[ast::Attribute]) {
         for attr in attrs.iter() {
             if !self.features.map(|features| features.struct_field_attributes).unwrap_or(true) {
                 let mut err = feature_err(
-                    &self.sess,
+                    self.sess,
                     "struct_field_attributes",
                     attr.span,
                     GateIssue::Language,
index fe5cb87ad59b5c89370490754c45576b6bcad900..73aeb40df840064ae0ffd2ea79ee369a9939d498 100644 (file)
@@ -120,7 +120,7 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt,
 
         // URLs can be unavoidably longer than the line limit, so we allow them.
         // Allowed format is: `[name]: https://www.rust-lang.org/`
-        let is_url = |l: &str| l.starts_with('[') && l.contains("]:") && l.contains("http");
+        let is_url = |l: &str| l.starts_with("[") && l.contains("]:") && l.contains("http");
 
         if msg.lines().any(|line| line.len() > MAX_DESCRIPTION_WIDTH && !is_url(line)) {
             ecx.span_err(span, &format!(
@@ -177,7 +177,7 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt,
             if let Err(e) = output_metadata(ecx,
                                             &target_triple,
                                             &crate_name.name.as_str(),
-                                            &diagnostics) {
+                                            diagnostics) {
                 ecx.span_bug(span, &format!(
                     "error writing metadata for triple `{}` and crate `{}`, error: {}, \
                      cause: {:?}",
@@ -227,7 +227,7 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt,
 
     MacEager::items(SmallVector::many(vec![
         P(ast::Item {
-            ident: name.clone(),
+            ident: *name,
             attrs: Vec::new(),
             id: ast::DUMMY_NODE_ID,
             node: ast::ItemKind::Const(
index f731c5abdd6a1f7e0ad275f45460e047e1f5779f..31a7e0d58d0d8e20fe3c290bee248f9a3378fec2 100644 (file)
@@ -635,8 +635,8 @@ pub struct ExpansionData {
 }
 
 /// One of these is made during expansion and incrementally updated as we go;
-/// when a macro expansion occurs, the resulting nodes have the backtrace()
-/// -> expn_info of their expansion context stored into their span.
+/// when a macro expansion occurs, the resulting nodes have the `backtrace()
+/// -> expn_info` of their expansion context stored into their span.
 pub struct ExtCtxt<'a> {
     pub parse_sess: &'a parse::ParseSess,
     pub ecfg: expand::ExpansionConfig<'a>,
@@ -709,7 +709,7 @@ pub fn expansion_cause(&self) -> Span {
                 }
                 ctxt = info.call_site.ctxt;
                 last_macro = Some(info.call_site);
-                return Some(());
+                Some(())
             }).is_none() {
                 break
             }
@@ -770,9 +770,9 @@ pub fn span_bug(&self, sp: Span, msg: &str) -> ! {
     }
     pub fn trace_macros_diag(&self) {
         for (sp, notes) in self.expansions.iter() {
-            let mut db = self.parse_sess.span_diagnostic.span_note_diag(*sp, &"trace_macro");
+            let mut db = self.parse_sess.span_diagnostic.span_note_diag(*sp, "trace_macro");
             for note in notes {
-                db.note(&note);
+                db.note(note);
             }
             db.emit();
         }
@@ -795,7 +795,7 @@ pub fn std_path(&self, components: &[&str]) -> Vec<ast::Ident> {
             v.push(self.ident_of(s));
         }
         v.extend(components.iter().map(|s| self.ident_of(s)));
-        return v
+        v
     }
     pub fn name_of(&self, st: &str) -> ast::Name {
         Symbol::intern(st)
index a8aa103f80a8e4a7c0f150d59ebfd2a427cd7517..a56336795392597e5f9645f315a9304d28ba5fda 100644 (file)
@@ -415,19 +415,19 @@ fn expand_attr_invoc(&mut self, invoc: Invocation, ext: Rc<SyntaxExtension>) ->
 
         match *ext {
             MultiModifier(ref mac) => {
-                let meta = panictry!(attr.parse_meta(&self.cx.parse_sess));
+                let meta = panictry!(attr.parse_meta(self.cx.parse_sess));
                 let item = mac.expand(self.cx, attr.span, &meta, item);
                 kind.expect_from_annotatables(item)
             }
             MultiDecorator(ref mac) => {
                 let mut items = Vec::new();
-                let meta = panictry!(attr.parse_meta(&self.cx.parse_sess));
+                let meta = panictry!(attr.parse_meta(self.cx.parse_sess));
                 mac.expand(self.cx, attr.span, &meta, &item, &mut |item| items.push(item));
                 items.push(item);
                 kind.expect_from_annotatables(items)
             }
             SyntaxExtension::AttrProcMacro(ref mac) => {
-                let item_toks = stream_for_item(&item, &self.cx.parse_sess);
+                let item_toks = stream_for_item(&item, self.cx.parse_sess);
 
                 let span = Span { ctxt: self.cx.backtrace(), ..attr.span };
                 let tok_result = mac.expand(self.cx, attr.span, attr.tokens, item_toks);
@@ -439,7 +439,7 @@ fn expand_attr_invoc(&mut self, invoc: Invocation, ext: Rc<SyntaxExtension>) ->
             }
             _ => {
                 let msg = &format!("macro `{}` may not be used in attributes", attr.path);
-                self.cx.span_err(attr.span, &msg);
+                self.cx.span_err(attr.span, msg);
                 kind.dummy(attr.span)
             }
         }
@@ -454,7 +454,7 @@ fn expand_bang_invoc(&mut self, invoc: Invocation, ext: Rc<SyntaxExtension>) ->
         };
         let path = &mac.node.path;
 
-        let ident = ident.unwrap_or(keywords::Invalid.ident());
+        let ident = ident.unwrap_or_else(|| keywords::Invalid.ident());
         let marked_tts = noop_fold_tts(mac.node.stream(), &mut Marker(mark));
         let opt_expanded = match *ext {
             NormalTT(ref expandfun, exp_span, allow_internal_unstable) => {
@@ -591,7 +591,7 @@ fn expand_derive_invoc(&mut self, invoc: Invocation, ext: Rc<SyntaxExtension>) -
             }
             _ => {
                 let msg = &format!("macro `{}` may not be used for derive attributes", attr.path);
-                self.cx.span_err(span, &msg);
+                self.cx.span_err(span, msg);
                 kind.dummy(span)
             }
         }
@@ -749,19 +749,15 @@ fn configure<T: HasAttrs>(&mut self, node: T) -> Option<T> {
     fn check_attributes(&mut self, attrs: &[ast::Attribute]) {
         let features = self.cx.ecfg.features.unwrap();
         for attr in attrs.iter() {
-            feature_gate::check_attribute(&attr, &self.cx.parse_sess, features);
+            feature_gate::check_attribute(attr, self.cx.parse_sess, features);
         }
     }
 }
 
 pub fn find_attr_invoc(attrs: &mut Vec<ast::Attribute>) -> Option<ast::Attribute> {
-    for i in 0 .. attrs.len() {
-        if !attr::is_known(&attrs[i]) && !is_builtin_attr(&attrs[i]) {
-             return Some(attrs.remove(i));
-        }
-    }
-
-    None
+    attrs.iter()
+         .position(|a| !attr::is_known(a) && !is_builtin_attr(a))
+         .map(|i| attrs.remove(i))
 }
 
 // These are pretty nasty. Ideally, we would keep the tokens around, linked from
@@ -923,7 +919,7 @@ fn fold_item(&mut self, item: P<ast::Item>) -> SmallVector<P<ast::Item>> {
                 let result = noop_fold_item(item, self);
                 self.cx.current_expansion.module = orig_module;
                 self.cx.current_expansion.directory_ownership = orig_directory_ownership;
-                return result;
+                result
             }
             // Ensure that test functions are accessible from the test harness.
             ast::ItemKind::Fn(..) if self.cx.ecfg.should_test => {
index d7a85baa3fff56b127eefdea5fcd24be5aa7a677..85ae65e6b79ae3c5d8026385784be3bdbda0fdf2 100644 (file)
@@ -23,7 +23,7 @@
 ///
 /// This is registered as a set of expression syntax extension called quote!
 /// that lifts its argument token-tree to an AST representing the
-/// construction of the same token tree, with token::SubstNt interpreted
+/// construction of the same token tree, with `token::SubstNt` interpreted
 /// as antiquotes (splices).
 
 pub mod rt {
@@ -389,7 +389,7 @@ pub fn unflatten(tts: Vec<TokenTree>) -> Vec<TokenTree> {
                 result = results.pop().unwrap();
                 result.push(tree);
             }
-            tree @ _ => result.push(tree),
+            tree => result.push(tree),
         }
     }
     result
index 22a5776315a0cfe6cdf3c1e72e7de81ab0ef4b92..4183583d66ffd1b34a7929b9729817ebab60dfd8 100644 (file)
@@ -150,7 +150,7 @@ pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenT
             cx.span_err(sp,
                         &format!("{} wasn't a utf-8 file",
                                 file.display()));
-            return DummyResult::expr(sp);
+            DummyResult::expr(sp)
         }
     }
 }
@@ -167,7 +167,7 @@ pub fn expand_include_bytes(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::Toke
         Err(e) => {
             cx.span_err(sp,
                         &format!("couldn't read {}: {}", file.display(), e));
-            return DummyResult::expr(sp);
+            DummyResult::expr(sp)
         }
         Ok(..) => {
             // Add this input file to the code map to make it available as
index eb0b7c29f8d9ad8d74aa15a283c7b12f5ea1c608..bf66aa0f00bed42409ffbdc7ffe8cc3341d2a787 100644 (file)
 //! repetitions indicated by Kleene stars. It only advances or calls out to the
 //! real Rust parser when no `cur_eis` items remain
 //!
-//! Example: Start parsing `a a a a b` against [· a $( a )* a b].
+//! Example:
 //!
-//! Remaining input: `a a a a b`
+//! ```text, ignore
+//! Start parsing a a a a b against [· a $( a )* a b].
+//!
+//! Remaining input: a a a a b
 //! next_eis: [· a $( a )* a b]
 //!
-//! - - - Advance over an `a`. - - -
+//! - - - Advance over an a. - - -
 //!
-//! Remaining input: `a a a b`
+//! Remaining input: a a a b
 //! cur: [a · $( a )* a b]
 //! Descend/Skip (first item).
 //! next: [a $( · a )* a b]  [a $( a )* · a b].
 //!
-//! - - - Advance over an `a`. - - -
+//! - - - Advance over an a. - - -
 //!
-//! Remaining input: `a a b`
+//! Remaining input: a a b
 //! cur: [a $( a · )* a b]  next: [a $( a )* a · b]
 //! Finish/Repeat (first item)
 //! next: [a $( a )* · a b]  [a $( · a )* a b]  [a $( a )* a · b]
 //!
-//! - - - Advance over an `a`. - - - (this looks exactly like the last step)
+//! - - - Advance over an a. - - - (this looks exactly like the last step)
 //!
-//! Remaining input: `a b`
+//! Remaining input: a b
 //! cur: [a $( a · )* a b]  next: [a $( a )* a · b]
 //! Finish/Repeat (first item)
 //! next: [a $( a )* · a b]  [a $( · a )* a b]  [a $( a )* a · b]
 //!
-//! - - - Advance over an `a`. - - - (this looks exactly like the last step)
+//! - - - Advance over an a. - - - (this looks exactly like the last step)
 //!
-//! Remaining input: `b`
+//! Remaining input: b
 //! cur: [a $( a · )* a b]  next: [a $( a )* a · b]
 //! Finish/Repeat (first item)
 //! next: [a $( a )* · a b]  [a $( · a )* a b]
 //!
-//! - - - Advance over a `b`. - - -
+//! - - - Advance over a b. - - -
 //!
-//! Remaining input: ``
+//! Remaining input: ''
 //! eof: [a $( a )* a b ·]
+//! ```
 
 pub use self::NamedMatch::*;
 pub use self::ParseResult::*;
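
The walkthrough above corresponds to an ordinary `macro_rules!` matcher; a small sketch using the same matcher and input (no metavariables are involved, so the parser's multiple in-flight items never become an ambiguity error):

```rust
macro_rules! demo {
    // The matcher from the example: a $( a )* a b
    (a $( a )* a b) => { "matched" };
}

fn main() {
    // This invocation is exactly the `a a a a b` input walked through above.
    assert_eq!(demo!(a a a a b), "matched");
}
```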
@@ -178,20 +182,20 @@ fn initial_matcher_pos(ms: Vec<TokenTree>, lo: BytePos) -> Box<MatcherPos> {
     })
 }
 
-/// NamedMatch is a pattern-match result for a single token::MATCH_NONTERMINAL:
+/// `NamedMatch` is a pattern-match result for a single `token::MATCH_NONTERMINAL`:
 /// so it is associated with a single ident in a parse, and all
-/// `MatchedNonterminal`s in the NamedMatch have the same nonterminal type
-/// (expr, item, etc). Each leaf in a single NamedMatch corresponds to a
-/// single token::MATCH_NONTERMINAL in the TokenTree that produced it.
+/// `MatchedNonterminal`s in the `NamedMatch` have the same nonterminal type
+/// (expr, item, etc). Each leaf in a single `NamedMatch` corresponds to a
+/// single `token::MATCH_NONTERMINAL` in the `TokenTree` that produced it.
 ///
-/// The in-memory structure of a particular NamedMatch represents the match
+/// The in-memory structure of a particular `NamedMatch` represents the match
 /// that occurred when a particular subset of a matcher was applied to a
 /// particular token tree.
 ///
-/// The width of each MatchedSeq in the NamedMatch, and the identity of the
-/// `MatchedNonterminal`s, will depend on the token tree it was applied to:
-/// each MatchedSeq corresponds to a single TTSeq in the originating
-/// token tree. The depth of the NamedMatch structure will therefore depend
+/// The width of each `MatchedSeq` in the `NamedMatch`, and the identity of
+/// the `MatchedNonterminal`s, will depend on the token tree it was applied
+/// to: each `MatchedSeq` corresponds to a single `TTSeq` in the originating
+/// token tree. The depth of the `NamedMatch` structure will therefore depend
 /// only on the nesting depth of `ast::TTSeq`s in the originating
 /// token tree it was derived from.
 
@@ -267,11 +271,12 @@ pub fn parse_failure_msg(tok: Token) -> String {
 
 /// Perform a token equality check, ignoring syntax context (that is, an unhygienic comparison)
 fn token_name_eq(t1 : &Token, t2 : &Token) -> bool {
-    match (t1,t2) {
-        (&token::Ident(id1),&token::Ident(id2))
-        | (&token::Lifetime(id1),&token::Lifetime(id2)) =>
-            id1.name == id2.name,
-        _ => *t1 == *t2
+    if let (Some(id1), Some(id2)) = (t1.ident(), t2.ident()) {
+        id1.name == id2.name
+    } else if let (&token::Lifetime(id1), &token::Lifetime(id2)) = (t1, t2) {
+        id1.name == id2.name
+    } else {
+        *t1 == *t2
     }
 }
 
@@ -334,7 +339,7 @@ fn inner_parse_loop(sess: &ParseSess,
                 // Check if we need a separator
                 if idx == len && ei.sep.is_some() {
                     // We have a separator, and it is the current token.
-                    if ei.sep.as_ref().map(|ref sep| token_name_eq(&token, sep)).unwrap_or(false) {
+                    if ei.sep.as_ref().map(|sep| token_name_eq(token, sep)).unwrap_or(false) {
                         ei.idx += 1;
                         next_eis.push(ei);
                     }
@@ -401,7 +406,7 @@ fn inner_parse_loop(sess: &ParseSess,
                     cur_eis.push(ei);
                 }
                 TokenTree::Token(_, ref t) => {
-                    if token_name_eq(t, &token) {
+                    if token_name_eq(t, token) {
                         ei.idx += 1;
                         next_eis.push(ei);
                     }
@@ -485,11 +490,8 @@ pub fn parse(sess: &ParseSess, tts: TokenStream, ms: &[TokenTree], directory: Op
 }
 
 fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal {
-    match name {
-        "tt" => {
-            return token::NtTT(p.parse_token_tree());
-        }
-        _ => {}
+    if name == "tt" {
+        return token::NtTT(p.parse_token_tree());
     }
     // check at the beginning and the parser checks after each bump
     p.process_potential_macro_variable();
index f959ccc989e2e9e9d55643966f0fe4648b8bd17b..4a307ab18a593b07e547b534b38fbc09bd8149b8 100644 (file)
@@ -94,7 +94,7 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt,
                           -> Box<MacResult+'cx> {
     if cx.trace_macros() {
         let sp = sp.macro_backtrace().last().map(|trace| trace.call_site).unwrap_or(sp);
-        let mut values: &mut Vec<String> = cx.expansions.entry(sp).or_insert(vec![]);
+        let mut values: &mut Vec<String> = cx.expansions.entry(sp).or_insert_with(Vec::new);
         values.push(format!("expands to `{}! {{ {} }}`", name, arg));
     }
 
@@ -206,7 +206,7 @@ pub fn compile(sess: &ParseSess, features: &RefCell<Features>, def: &ast::Item)
     let mut valid = true;
 
     // Extract the arguments:
-    let lhses = match **argument_map.get(&lhs_nm).unwrap() {
+    let lhses = match *argument_map[&lhs_nm] {
         MatchedSeq(ref s, _) => {
             s.iter().map(|m| {
                 if let MatchedNonterminal(ref nt) = **m {
@@ -222,7 +222,7 @@ pub fn compile(sess: &ParseSess, features: &RefCell<Features>, def: &ast::Item)
         _ => sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs")
     };
 
-    let rhses = match **argument_map.get(&rhs_nm).unwrap() {
+    let rhses = match *argument_map[&rhs_nm] {
         MatchedSeq(ref s, _) => {
             s.iter().map(|m| {
                 if let MatchedNonterminal(ref nt) = **m {
@@ -260,13 +260,12 @@ fn check_lhs_nt_follows(sess: &ParseSess,
                         lhs: &quoted::TokenTree) -> bool {
     // lhs is going to be like TokenTree::Delimited(...), where the
     // entire lhs is those tts. Or, it can be a "bare sequence", not wrapped in parens.
-    match lhs {
-        &quoted::TokenTree::Delimited(_, ref tts) => check_matcher(sess, features, &tts.tts),
-        _ => {
-            let msg = "invalid macro matcher; matchers must be contained in balanced delimiters";
-            sess.span_diagnostic.span_err(lhs.span(), msg);
-            false
-        }
+    if let quoted::TokenTree::Delimited(_, ref tts) = *lhs {
+        check_matcher(sess, features, &tts.tts)
+    } else {
+        let msg = "invalid macro matcher; matchers must be contained in balanced delimiters";
+        sess.span_diagnostic.span_err(lhs.span(), msg);
+        false
     }
     // we don't abort on errors on rejection, the driver will do that for us
     // after parsing/expansion. we can report every error in every macro this way.
@@ -283,17 +282,15 @@ fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[quoted::TokenTree]) -> bool {
                 return false;
             },
             TokenTree::Sequence(span, ref seq) => {
-                if seq.separator.is_none() {
-                    if seq.tts.iter().all(|seq_tt| {
-                        match *seq_tt {
-                            TokenTree::Sequence(_, ref sub_seq) =>
-                                sub_seq.op == quoted::KleeneOp::ZeroOrMore,
-                            _ => false,
-                        }
-                    }) {
-                        sess.span_diagnostic.span_err(span, "repetition matches empty token tree");
-                        return false;
+                if seq.separator.is_none() && seq.tts.iter().all(|seq_tt| {
+                    match *seq_tt {
+                        TokenTree::Sequence(_, ref sub_seq) =>
+                            sub_seq.op == quoted::KleeneOp::ZeroOrMore,
+                        _ => false,
                     }
+                }) {
+                    sess.span_diagnostic.span_err(span, "repetition matches empty token tree");
+                    return false;
                 }
                 if !check_lhs_no_empty_seq(sess, &seq.tts) {
                     return false;
@@ -407,7 +404,7 @@ fn build_recur(sets: &mut FirstSets, tts: &[TokenTree]) -> TokenSet {
                 }
             }
 
-            return first;
+            first
         }
     }
 
@@ -469,7 +466,7 @@ fn first(&self, tts: &[quoted::TokenTree]) -> TokenSet {
         // we only exit the loop if `tts` was empty or if every
         // element of `tts` matches the empty sequence.
         assert!(first.maybe_empty);
-        return first;
+        first
     }
 }
 
@@ -579,7 +576,7 @@ fn check_matcher_core(sess: &ParseSess,
         let build_suffix_first = || {
             let mut s = first_sets.first(suffix);
             if s.maybe_empty { s.add_all(follow); }
-            return s;
+            s
         };
 
         // (we build `suffix_first` on demand below; you can tell
@@ -861,6 +858,7 @@ fn quoted_tt_to_string(tt: &quoted::TokenTree) -> String {
     match *tt {
         quoted::TokenTree::Token(_, ref tok) => ::print::pprust::token_to_string(tok),
         quoted::TokenTree::MetaVarDecl(_, name, kind) => format!("${}:{}", name, kind),
-        _ => panic!("unexpected quoted::TokenTree::{Sequence or Delimited} in follow set checker"),
+        _ => panic!("unexpected quoted::TokenTree::{{Sequence or Delimited}} \
+                     in follow set checker"),
     }
 }
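
The hunks above swap `entry(..).or_insert(vec![])` for `or_insert_with(Vec::new)` and `map.get(&k).unwrap()` for indexing. A small self-contained sketch of both idioms (the map and key here are made up, not the compiler's):

```rust
use std::collections::HashMap;

fn main() {
    let mut expansions: HashMap<u32, Vec<String>> = HashMap::new();

    // `or_insert_with(Vec::new)` allocates the empty Vec only when the key is
    // missing; `or_insert(vec![])` would build it eagerly on every call.
    expansions.entry(1).or_insert_with(Vec::new).push("expands to ...".to_string());

    // Indexing panics on a missing key, just like `.get(&k).unwrap()`,
    // without the extra method chain.
    assert_eq!(expansions[&1].len(), 1);
}
```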
index d216effbd450812502d4a1c675cb933474f6ab9c..fa65e9501c2bb793aa34c64956aaff717bb4227d 100644 (file)
@@ -96,6 +96,17 @@ pub fn len(&self) -> usize {
         }
     }
 
+    pub fn is_empty(&self) -> bool {
+        match *self {
+            TokenTree::Delimited(_, ref delimed) => match delimed.delim {
+                token::NoDelim => delimed.tts.is_empty(),
+                _ => false,
+            },
+            TokenTree::Sequence(_, ref seq) => seq.tts.is_empty(),
+            _ => true,
+        }
+    }
+
     pub fn get_tt(&self, index: usize) -> TokenTree {
         match (self, index) {
             (&TokenTree::Delimited(_, ref delimed), _) if delimed.delim == token::NoDelim => {
@@ -144,9 +155,9 @@ pub fn parse(input: tokenstream::TokenStream, expect_matchers: bool, sess: &Pars
                             }
                             _ => end_sp,
                         },
-                        tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span),
+                        tree => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span),
                     },
-                    tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(start_sp),
+                    tree => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(start_sp),
                 };
                 sess.missing_fragment_specifiers.borrow_mut().insert(span);
                 result.push(TokenTree::MetaVarDecl(span, ident, keywords::Invalid.ident()));
@@ -228,10 +239,10 @@ fn kleene_op(token: &token::Token) -> Option<KleeneOp> {
                     Some(op) => return (Some(tok), op),
                     None => span,
                 },
-                tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span),
+                tree => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span),
             }
         },
-        tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span),
+        tree => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span),
     };
 
     sess.span_diagnostic.span_err(span, "expected `*` or `+`");
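
The addition of `is_empty` above pairs the existing `len` with an emptiness check, the shape clippy's `len_without_is_empty` lint asks for. A tiny sketch on a made-up wrapper type:

```rust
struct TokenList(Vec<u32>);

impl TokenList {
    pub fn len(&self) -> usize {
        self.0.len()
    }

    // Callers can now write `list.is_empty()` instead of `list.len() == 0`.
    pub fn is_empty(&self) -> bool {
        self.0.is_empty()
    }
}

fn main() {
    let list = TokenList(vec![]);
    assert!(list.is_empty());
    assert_eq!(list.len(), 0);
}
```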
index 947089b0b9ac4b977686c26603f47b06664545f9..2a435bdea107f2d11d2bf22a0f20005dc0f589aa 100644 (file)
@@ -121,20 +121,20 @@ pub fn transcribe(sp_diag: &Handler,
                                          &repeats) {
                     LockstepIterSize::Unconstrained => {
                         panic!(sp_diag.span_fatal(
-                            sp.clone(), /* blame macro writer */
+                            sp, /* blame macro writer */
                             "attempted to repeat an expression \
                              containing no syntax \
                              variables matched as repeating at this depth"));
                     }
                     LockstepIterSize::Contradiction(ref msg) => {
                         // FIXME #2887 blame macro invoker instead
-                        panic!(sp_diag.span_fatal(sp.clone(), &msg[..]));
+                        panic!(sp_diag.span_fatal(sp, &msg[..]));
                     }
                     LockstepIterSize::Constraint(len, _) => {
                         if len == 0 {
                             if seq.op == quoted::KleeneOp::OneOrMore {
                                 // FIXME #2887 blame invoker
-                                panic!(sp_diag.span_fatal(sp.clone(),
+                                panic!(sp_diag.span_fatal(sp,
                                                           "this must repeat at least once"));
                             }
                         } else {
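
The `sp.clone()` to `sp` changes above rely on spans being `Copy`, so the explicit clone was redundant. A minimal illustration with an assumed `Span` type, not the compiler's:

```rust
#[derive(Copy, Clone, Debug, PartialEq)]
struct Span {
    lo: u32,
    hi: u32,
}

fn report(sp: Span) -> String {
    format!("error at {}..{}", sp.lo, sp.hi)
}

fn main() {
    let sp = Span { lo: 3, hi: 7 };
    // Passing `sp` copies it implicitly; `sp.clone()` would do the same work.
    let msg = report(sp);
    assert_eq!(msg, "error at 3..7");
    // `sp` is still usable afterwards because `Span: Copy`.
    assert_eq!(sp, Span { lo: 3, hi: 7 });
}
```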
index b6a2c983fd4d7fb687abc39bea3ea65ddb14a844..09090ab87313087ac393082c21f6f95a0c335a83 100644 (file)
@@ -472,7 +472,7 @@ pub enum Stability {
 impl ::std::fmt::Debug for AttributeGate {
     fn fmt(&self, fmt: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
         match *self {
-            Gated(ref stab, ref name, ref expl, _) =>
+            Gated(ref stab, name, expl, _) =>
                 write!(fmt, "Gated({:?}, {}, {})", stab, name, expl),
             Ungated => write!(fmt, "Ungated")
         }
@@ -816,7 +816,7 @@ pub fn is_builtin_attr(attr: &ast::Attribute) -> bool {
 ];
 
 // cfg(...)'s that are feature gated
-const GATED_CFGS: &'static [(&'static str, &'static str, fn(&Features) -> bool)] = &[
+const GATED_CFGS: &[(&str, &str, fn(&Features) -> bool)] = &[
     // (name in cfg, feature, function to check if the feature is enabled)
     ("target_feature", "cfg_target_feature", cfg_fn!(cfg_target_feature)),
     ("target_vendor", "cfg_target_vendor", cfg_fn!(cfg_target_vendor)),
@@ -881,7 +881,7 @@ fn check_attribute(&self, attr: &ast::Attribute, is_macro: bool) {
         let name = unwrap_or!(attr.name(), return).as_str();
         for &(n, ty, ref gateage) in BUILTIN_ATTRIBUTES {
             if name == n {
-                if let &Gated(_, ref name, ref desc, ref has_feature) = gateage {
+                if let Gated(_, name, desc, ref has_feature) = *gateage {
                     gate_feature_fn!(self, has_feature, attr.span, name, desc);
                 }
                 debug!("check_attribute: {:?} is builtin, {:?}, {:?}", attr.path, ty, gateage);
@@ -1098,7 +1098,7 @@ fn contains_novel_literal(item: &ast::MetaItem) -> bool {
         NameValue(ref lit) => !lit.node.is_str(),
         List(ref list) => list.iter().any(|li| {
             match li.node {
-                MetaItem(ref mi) => contains_novel_literal(&mi),
+                MetaItem(ref mi) => contains_novel_literal(mi),
                 Literal(_) => true,
             }
         }),
@@ -1120,7 +1120,7 @@ fn visit_attribute(&mut self, attr: &ast::Attribute) {
             return
         }
 
-        let meta = panictry!(attr.parse_meta(&self.context.parse_sess));
+        let meta = panictry!(attr.parse_meta(self.context.parse_sess));
         if contains_novel_literal(&meta) {
             gate_feature_post!(&self, attr_literals, attr.span,
                                "non-string literals in attributes, or string \
@@ -1216,14 +1216,11 @@ fn visit_item(&mut self, i: &'a ast::Item) {
             }
 
             ast::ItemKind::Impl(_, polarity, defaultness, _, _, _, _) => {
-                match polarity {
-                    ast::ImplPolarity::Negative => {
-                        gate_feature_post!(&self, optin_builtin_traits,
-                                           i.span,
-                                           "negative trait bounds are not yet fully implemented; \
-                                            use marker types for now");
-                    },
-                    _ => {}
+                if polarity == ast::ImplPolarity::Negative {
+                    gate_feature_post!(&self, optin_builtin_traits,
+                                       i.span,
+                                       "negative trait bounds are not yet fully implemented; \
+                                        use marker types for now");
                 }
 
                 if let ast::Defaultness::Default = defaultness {
@@ -1272,11 +1269,9 @@ fn visit_ty(&mut self, ty: &'a ast::Ty) {
 
     fn visit_fn_ret_ty(&mut self, ret_ty: &'a ast::FunctionRetTy) {
         if let ast::FunctionRetTy::Ty(ref output_ty) = *ret_ty {
-            match output_ty.node {
-                ast::TyKind::Never => return,
-                _ => (),
-            };
-            self.visit_ty(output_ty)
+            if output_ty.node != ast::TyKind::Never {
+                self.visit_ty(output_ty)
+            }
         }
     }
 
@@ -1373,17 +1368,14 @@ fn visit_fn(&mut self,
                 span: Span,
                 _node_id: NodeId) {
         // check for const fn declarations
-        match fn_kind {
-            FnKind::ItemFn(_, _, _, Spanned { node: ast::Constness::Const, .. }, _, _, _) => {
-                gate_feature_post!(&self, const_fn, span, "const fn is unstable");
-            }
-            _ => {
-                // stability of const fn methods are covered in
-                // visit_trait_item and visit_impl_item below; this is
-                // because default methods don't pass through this
-                // point.
-            }
+        if let FnKind::ItemFn(_, _, _, Spanned { node: ast::Constness::Const, .. }, _, _, _) =
+            fn_kind {
+            gate_feature_post!(&self, const_fn, span, "const fn is unstable");
         }
+        // stability of const fn methods are covered in
+        // visit_trait_item and visit_impl_item below; this is
+        // because default methods don't pass through this
+        // point.
 
         match fn_kind {
             FnKind::ItemFn(_, _, _, _, abi, _, _) |
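
One change above shortens `GATED_CFGS: &'static [(&'static str, ...)]` to `&[(&str, ...)]`; in `const` and `static` items an elided lifetime means `'static`, so the annotations were noise. A standalone sketch with two entries echoed from the hunk above:

```rust
// In a `const` item, `&[(&str, &str)]` is shorthand for
// `&'static [(&'static str, &'static str)]`.
const GATED: &[(&str, &str)] = &[
    ("target_feature", "cfg_target_feature"),
    ("target_vendor", "cfg_target_vendor"),
];

fn main() {
    for &(cfg, feature) in GATED {
        println!("cfg `{}` is gated behind `{}`", cfg, feature);
    }
}
```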
index 06335584c96108571e0136a1ba84a052270820e9..f37dcfdde8985f7594b3381e8e7cf4e57a2a889f 100644 (file)
@@ -337,7 +337,7 @@ fn from_span(span: Span, je: &JsonEmitter) -> Vec<DiagnosticSpanLine> {
                       })
                      .collect()
              })
-            .unwrap_or(vec![])
+            .unwrap_or_else(|_| vec![])
     }
 }
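
The `unwrap_or(vec![])` to `unwrap_or_else(|_| vec![])` change above defers building the fallback until it is actually needed. A small sketch with a hypothetical `line_lengths` helper:

```rust
fn line_lengths(src: Result<&str, ()>) -> Vec<usize> {
    // The closure only runs (and only allocates) on the `Err` path.
    src.map(|s| s.lines().map(str::len).collect())
       .unwrap_or_else(|_| vec![])
}

fn main() {
    assert_eq!(line_lengths(Ok("ab\ncdef")), vec![2, 4]);
    assert_eq!(line_lengths(Err(())), Vec::<usize>::new());
}
```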
 
index 92cec462ffb7cefbdd623490c125dab5a4e5f4bf..082930777e598cfe4a9392e1dd7e18106ff648fa 100644 (file)
@@ -62,7 +62,7 @@ pub fn parse_outer_attributes(&mut self) -> PResult<'a, Vec<ast::Attribute>> {
                 _ => break,
             }
         }
-        return Ok(attrs);
+        Ok(attrs)
     }
 
     /// Matches `attribute = # ! [ meta_item ]`
@@ -182,7 +182,7 @@ pub fn parse_inner_attributes(&mut self) -> PResult<'a, Vec<ast::Attribute>> {
                     }
 
                     let attr = self.parse_attribute(true)?;
-                    assert!(attr.style == ast::AttrStyle::Inner);
+                    assert_eq!(attr.style, ast::AttrStyle::Inner);
                     attrs.push(attr);
                 }
                 token::DocComment(s) => {
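
The swap from `assert!(a == b)` to `assert_eq!(a, b)` above gives failure messages that print both operands. A self-contained sketch using an invented `AttrStyle`-like enum:

```rust
#[derive(Debug, PartialEq)]
enum AttrStyle {
    Outer,
    Inner,
}

fn parsed_style(bang: bool) -> AttrStyle {
    if bang { AttrStyle::Inner } else { AttrStyle::Outer }
}

fn main() {
    // On failure this prints the left and right values, not just the expression.
    assert_eq!(parsed_style(true), AttrStyle::Inner);
    assert_eq!(parsed_style(false), AttrStyle::Outer);
}
```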
index 4fe4ec7e4c0ed3f785fb498ded087152b4b67892..0c6f09ba7666cb0ed6b5896194c75794f1f2d5d1 100644 (file)
@@ -43,14 +43,14 @@ pub fn expr_is_simple_block(e: &ast::Expr) -> bool {
 }
 
 /// this statement requires a semicolon after it.
-/// note that in one case (stmt_semi), we've already
+/// note that in one case (`stmt_semi`), we've already
 /// seen the semicolon, and thus don't need another.
 pub fn stmt_ends_with_semi(stmt: &ast::StmtKind) -> bool {
     match *stmt {
         ast::StmtKind::Local(_) => true,
-        ast::StmtKind::Item(_) => false,
         ast::StmtKind::Expr(ref e) => expr_requires_semi_to_be_stmt(e),
-        ast::StmtKind::Semi(..) => false,
+        ast::StmtKind::Item(_) |
+        ast::StmtKind::Semi(..) |
         ast::StmtKind::Mac(..) => false,
     }
 }
index b57708f9193a39c4daed2ba0be23ce010c1ee45f..fe931f7cf6a645f4025baea781dc4e5a89ac2460 100644 (file)
@@ -12,7 +12,7 @@
 
 use parse::token;
 
-/// SeqSep : a sequence separator (token)
+/// `SeqSep` : a sequence separator (token)
 /// and whether a trailing separator is allowed.
 pub struct SeqSep {
     pub sep: Option<token::Token>,
index 7ac322b144c7ea46c9641fbb719f87909e0f6998..8b545d3b909e823c472a1224219a723ce53731cf 100644 (file)
@@ -77,7 +77,7 @@ fn vertical_trim(lines: Vec<String>) -> Vec<String> {
         while j > i && lines[j - 1].trim().is_empty() {
             j -= 1;
         }
-        lines[i..j].iter().cloned().collect()
+        lines[i..j].to_vec()
     }
 
     /// remove a "[ \t]*\*" block from each line, if possible
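
`lines[i..j].iter().cloned().collect()` above becomes `lines[i..j].to_vec()`, which says the same thing with less machinery. A tiny sketch with a made-up `trimmed` helper:

```rust
fn trimmed(lines: &[String], i: usize, j: usize) -> Vec<String> {
    // `to_vec()` clones every element of the slice into a fresh Vec.
    lines[i..j].to_vec()
}

fn main() {
    let lines = vec!["".to_string(), "body".to_string(), "".to_string()];
    assert_eq!(trimmed(&lines, 1, 2), vec!["body".to_string()]);
}
```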
index a83b19c7334e7b70b33dac1ad6db07d5b3c3ae7d..0bcd457851890c6db66ecbae0d988c990cbf3865 100644 (file)
@@ -144,7 +144,7 @@ pub fn peek(&self) -> TokenAndSpan {
 
 impl<'a> StringReader<'a> {
     /// For comments.rs, which hackily pokes into next_pos and ch
-    pub fn new_raw<'b>(sess: &'a ParseSess, filemap: Rc<syntax_pos::FileMap>) -> Self {
+    pub fn new_raw(sess: &'a ParseSess, filemap: Rc<syntax_pos::FileMap>) -> Self {
         let mut sr = StringReader::new_raw_internal(sess, filemap);
         sr.bump();
         sr
@@ -180,7 +180,7 @@ fn new_raw_internal(sess: &'a ParseSess, filemap: Rc<syntax_pos::FileMap>) -> Se
 
     pub fn new(sess: &'a ParseSess, filemap: Rc<syntax_pos::FileMap>) -> Self {
         let mut sr = StringReader::new_raw(sess, filemap);
-        if let Err(_) = sr.advance_token() {
+        if sr.advance_token().is_err() {
             sr.emit_fatal_errors();
             panic!(FatalError);
         }
@@ -205,7 +205,7 @@ pub fn retokenize(sess: &'a ParseSess, mut span: Span) -> Self {
 
         sr.bump();
 
-        if let Err(_) = sr.advance_token() {
+        if sr.advance_token().is_err() {
             sr.emit_fatal_errors();
             panic!(FatalError);
         }
@@ -525,7 +525,7 @@ fn scan_comment(&mut self) -> Option<TokenAndSpan> {
                         self.bump();
                     }
 
-                    return if doc_comment {
+                    if doc_comment {
                         self.with_str_from(start_bpos, |string| {
                             // comments with only more "/"s are not doc comments
                             let tok = if is_doc_comment(string) {
@@ -544,7 +544,7 @@ fn scan_comment(&mut self) -> Option<TokenAndSpan> {
                             tok: token::Comment,
                             sp: mk_sp(start_bpos, self.pos),
                         })
-                    };
+                    }
                 }
                 Some('*') => {
                     self.bump();
@@ -764,7 +764,7 @@ fn scan_number(&mut self, c: char) -> token::Lit {
             }
             let pos = self.pos;
             self.check_float_base(start_bpos, pos, base);
-            return token::Float(self.name_from(start_bpos));
+            token::Float(self.name_from(start_bpos))
         } else {
             // it might be a float if it has an exponent
             if self.ch_is('e') || self.ch_is('E') {
@@ -774,7 +774,7 @@ fn scan_number(&mut self, c: char) -> token::Lit {
                 return token::Float(self.name_from(start_bpos));
             }
             // but we certainly have an integer!
-            return token::Integer(self.name_from(start_bpos));
+            token::Integer(self.name_from(start_bpos))
         }
     }
 
@@ -1051,9 +1051,9 @@ fn binop(&mut self, op: token::BinOpToken) -> token::Token {
         self.bump();
         if self.ch_is('=') {
             self.bump();
-            return token::BinOpEq(op);
+            token::BinOpEq(op)
         } else {
-            return token::BinOp(op);
+            token::BinOp(op)
         }
     }
 
@@ -1100,15 +1100,15 @@ fn next_token_inner(&mut self) -> Result<token::Token, ()> {
             // One-byte tokens.
             ';' => {
                 self.bump();
-                return Ok(token::Semi);
+                Ok(token::Semi)
             }
             ',' => {
                 self.bump();
-                return Ok(token::Comma);
+                Ok(token::Comma)
             }
             '.' => {
                 self.bump();
-                return if self.ch_is('.') {
+                if self.ch_is('.') {
                     self.bump();
                     if self.ch_is('.') {
                         self.bump();
@@ -1118,61 +1118,61 @@ fn next_token_inner(&mut self) -> Result<token::Token, ()> {
                     }
                 } else {
                     Ok(token::Dot)
-                };
+                }
             }
             '(' => {
                 self.bump();
-                return Ok(token::OpenDelim(token::Paren));
+                Ok(token::OpenDelim(token::Paren))
             }
             ')' => {
                 self.bump();
-                return Ok(token::CloseDelim(token::Paren));
+                Ok(token::CloseDelim(token::Paren))
             }
             '{' => {
                 self.bump();
-                return Ok(token::OpenDelim(token::Brace));
+                Ok(token::OpenDelim(token::Brace))
             }
             '}' => {
                 self.bump();
-                return Ok(token::CloseDelim(token::Brace));
+                Ok(token::CloseDelim(token::Brace))
             }
             '[' => {
                 self.bump();
-                return Ok(token::OpenDelim(token::Bracket));
+                Ok(token::OpenDelim(token::Bracket))
             }
             ']' => {
                 self.bump();
-                return Ok(token::CloseDelim(token::Bracket));
+                Ok(token::CloseDelim(token::Bracket))
             }
             '@' => {
                 self.bump();
-                return Ok(token::At);
+                Ok(token::At)
             }
             '#' => {
                 self.bump();
-                return Ok(token::Pound);
+                Ok(token::Pound)
             }
             '~' => {
                 self.bump();
-                return Ok(token::Tilde);
+                Ok(token::Tilde)
             }
             '?' => {
                 self.bump();
-                return Ok(token::Question);
+                Ok(token::Question)
             }
             ':' => {
                 self.bump();
                 if self.ch_is(':') {
                     self.bump();
-                    return Ok(token::ModSep);
+                    Ok(token::ModSep)
                 } else {
-                    return Ok(token::Colon);
+                    Ok(token::Colon)
                 }
             }
 
             '$' => {
                 self.bump();
-                return Ok(token::Dollar);
+                Ok(token::Dollar)
             }
 
             // Multi-byte tokens.
@@ -1180,21 +1180,21 @@ fn next_token_inner(&mut self) -> Result<token::Token, ()> {
                 self.bump();
                 if self.ch_is('=') {
                     self.bump();
-                    return Ok(token::EqEq);
+                    Ok(token::EqEq)
                 } else if self.ch_is('>') {
                     self.bump();
-                    return Ok(token::FatArrow);
+                    Ok(token::FatArrow)
                 } else {
-                    return Ok(token::Eq);
+                    Ok(token::Eq)
                 }
             }
             '!' => {
                 self.bump();
                 if self.ch_is('=') {
                     self.bump();
-                    return Ok(token::Ne);
+                    Ok(token::Ne)
                 } else {
-                    return Ok(token::Not);
+                    Ok(token::Not)
                 }
             }
             '<' => {
@@ -1202,21 +1202,21 @@ fn next_token_inner(&mut self) -> Result<token::Token, ()> {
                 match self.ch.unwrap_or('\x00') {
                     '=' => {
                         self.bump();
-                        return Ok(token::Le);
+                        Ok(token::Le)
                     }
                     '<' => {
-                        return Ok(self.binop(token::Shl));
+                        Ok(self.binop(token::Shl))
                     }
                     '-' => {
                         self.bump();
                         match self.ch.unwrap_or('\x00') {
                             _ => {
-                                return Ok(token::LArrow);
+                                Ok(token::LArrow)
                             }
                         }
                     }
                     _ => {
-                        return Ok(token::Lt);
+                        Ok(token::Lt)
                     }
                 }
             }
@@ -1225,13 +1225,13 @@ fn next_token_inner(&mut self) -> Result<token::Token, ()> {
                 match self.ch.unwrap_or('\x00') {
                     '=' => {
                         self.bump();
-                        return Ok(token::Ge);
+                        Ok(token::Ge)
                     }
                     '>' => {
-                        return Ok(self.binop(token::Shr));
+                        Ok(self.binop(token::Shr))
                     }
                     _ => {
-                        return Ok(token::Gt);
+                        Ok(token::Gt)
                     }
                 }
             }
@@ -1301,7 +1301,7 @@ fn next_token_inner(&mut self) -> Result<token::Token, ()> {
                 };
                 self.bump(); // advance ch past token
                 let suffix = self.scan_optional_raw_name();
-                return Ok(token::Literal(token::Char(id), suffix));
+                Ok(token::Literal(token::Char(id), suffix))
             }
             'b' => {
                 self.bump();
@@ -1312,7 +1312,7 @@ fn next_token_inner(&mut self) -> Result<token::Token, ()> {
                     _ => unreachable!(),  // Should have been a token::Ident above.
                 };
                 let suffix = self.scan_optional_raw_name();
-                return Ok(token::Literal(lit, suffix));
+                Ok(token::Literal(lit, suffix))
             }
             '"' => {
                 let start_bpos = self.pos;
@@ -1343,7 +1343,7 @@ fn next_token_inner(&mut self) -> Result<token::Token, ()> {
                 };
                 self.bump();
                 let suffix = self.scan_optional_raw_name();
-                return Ok(token::Literal(token::Str_(id), suffix));
+                Ok(token::Literal(token::Str_(id), suffix))
             }
             'r' => {
                 let start_bpos = self.pos;
@@ -1414,24 +1414,24 @@ fn next_token_inner(&mut self) -> Result<token::Token, ()> {
                     Symbol::intern("??")
                 };
                 let suffix = self.scan_optional_raw_name();
-                return Ok(token::Literal(token::StrRaw(id, hash_count), suffix));
+                Ok(token::Literal(token::StrRaw(id, hash_count), suffix))
             }
             '-' => {
                 if self.nextch_is('>') {
                     self.bump();
                     self.bump();
-                    return Ok(token::RArrow);
+                    Ok(token::RArrow)
                 } else {
-                    return Ok(self.binop(token::Minus));
+                    Ok(self.binop(token::Minus))
                 }
             }
             '&' => {
                 if self.nextch_is('&') {
                     self.bump();
                     self.bump();
-                    return Ok(token::AndAnd);
+                    Ok(token::AndAnd)
                 } else {
-                    return Ok(self.binop(token::And));
+                    Ok(self.binop(token::And))
                 }
             }
             '|' => {
@@ -1439,27 +1439,27 @@ fn next_token_inner(&mut self) -> Result<token::Token, ()> {
                     Some('|') => {
                         self.bump();
                         self.bump();
-                        return Ok(token::OrOr);
+                        Ok(token::OrOr)
                     }
                     _ => {
-                        return Ok(self.binop(token::Or));
+                        Ok(self.binop(token::Or))
                     }
                 }
             }
             '+' => {
-                return Ok(self.binop(token::Plus));
+                Ok(self.binop(token::Plus))
             }
             '*' => {
-                return Ok(self.binop(token::Star));
+                Ok(self.binop(token::Star))
             }
             '/' => {
-                return Ok(self.binop(token::Slash));
+                Ok(self.binop(token::Slash))
             }
             '^' => {
-                return Ok(self.binop(token::Caret));
+                Ok(self.binop(token::Caret))
             }
             '%' => {
-                return Ok(self.binop(token::Percent));
+                Ok(self.binop(token::Percent))
             }
             c => {
                 let last_bpos = self.pos;
@@ -1468,7 +1468,7 @@ fn next_token_inner(&mut self) -> Result<token::Token, ()> {
                                                           bpos,
                                                           "unknown start of token",
                                                           c);
-                unicode_chars::check_for_substitution(&self, c, &mut err);
+                unicode_chars::check_for_substitution(self, c, &mut err);
                 self.fatal_errs.push(err);
                 Err(())
             }
@@ -1490,14 +1490,14 @@ fn read_to_eol(&mut self) -> String {
         if self.ch_is('\n') {
             self.bump();
         }
-        return val;
+        val
     }
 
     fn read_one_line_comment(&mut self) -> String {
         let val = self.read_to_eol();
         assert!((val.as_bytes()[0] == b'/' && val.as_bytes()[1] == b'/') ||
                 (val.as_bytes()[0] == b'#' && val.as_bytes()[1] == b'!'));
-        return val;
+        val
     }
 
     fn consume_non_eol_whitespace(&mut self) {
@@ -1541,7 +1541,7 @@ fn scan_byte(&mut self) -> token::Lit {
             Symbol::intern("?")
         };
         self.bump(); // advance ch past token
-        return token::Byte(id);
+        token::Byte(id)
     }
 
     fn scan_byte_escape(&mut self, delim: char, below_0x7f_only: bool) -> bool {
@@ -1574,7 +1574,7 @@ fn scan_byte_string(&mut self) -> token::Lit {
             Symbol::intern("??")
         };
         self.bump();
-        return token::ByteStr(id);
+        token::ByteStr(id)
     }
 
     fn scan_raw_byte_string(&mut self) -> token::Lit {
@@ -1627,8 +1627,8 @@ fn scan_raw_byte_string(&mut self) -> token::Lit {
             self.bump();
         }
         self.bump();
-        return token::ByteStrRaw(self.name_from_to(content_start_bpos, content_end_bpos),
-                                 hash_count);
+        token::ByteStrRaw(self.name_from_to(content_start_bpos, content_end_bpos),
+                                 hash_count)
     }
 }
 
@@ -1646,7 +1646,7 @@ fn in_range(c: Option<char>, lo: char, hi: char) -> bool {
 }
 
 fn is_dec_digit(c: Option<char>) -> bool {
-    return in_range(c, '0', '9');
+    in_range(c, '0', '9')
 }
 
 pub fn is_doc_comment(s: &str) -> bool {
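
Most of the lexer hunks above drop trailing `return` in favour of tail expressions and replace `if let Err(_) = x` with `x.is_err()`. A standalone sketch of both, with a hypothetical digit scanner rather than the real lexer:

```rust
fn parse_digit(c: char) -> Result<u32, String> {
    match c.to_digit(10) {
        Some(d) => Ok(d),
        None => Err(format!("`{}` is not a digit", c)),
    }
}

fn classify(c: char) -> &'static str {
    // `is_err()` reads better than `if let Err(_) = parse_digit(c)`.
    if parse_digit(c).is_err() {
        return "not a digit";
    }
    // The last expression of the body is the return value; no `return` needed.
    "digit"
}

fn main() {
    assert_eq!(classify('7'), "digit");
    assert_eq!(classify('x'), "not a digit");
}
```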
index fe3ca1cf2305c0341ae3fb50db3616cb0e581c1b..1eff819d755493f33f713b6281100591bab78413 100644 (file)
@@ -107,18 +107,18 @@ pub fn parse_crate_attrs_from_file<'a>(input: &Path, sess: &'a ParseSess)
     parser.parse_inner_attributes()
 }
 
-pub fn parse_crate_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
-                                       -> PResult<'a, ast::Crate> {
+pub fn parse_crate_from_source_str(name: String, source: String, sess: &ParseSess)
+                                       -> PResult<ast::Crate> {
     new_parser_from_source_str(sess, name, source).parse_crate_mod()
 }
 
-pub fn parse_crate_attrs_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
-                                             -> PResult<'a, Vec<ast::Attribute>> {
+pub fn parse_crate_attrs_from_source_str(name: String, source: String, sess: &ParseSess)
+                                             -> PResult<Vec<ast::Attribute>> {
     new_parser_from_source_str(sess, name, source).parse_inner_attributes()
 }
 
-pub fn parse_expr_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
-                                      -> PResult<'a, P<ast::Expr>> {
+pub fn parse_expr_from_source_str(name: String, source: String, sess: &ParseSess)
+                                      -> PResult<P<ast::Expr>> {
     new_parser_from_source_str(sess, name, source).parse_expr()
 }
 
@@ -126,29 +126,29 @@ pub fn parse_expr_from_source_str<'a>(name: String, source: String, sess: &'a Pa
 ///
 /// Returns `Ok(Some(item))` when successful, `Ok(None)` when no item was found, and`Err`
 /// when a syntax error occurred.
-pub fn parse_item_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
-                                      -> PResult<'a, Option<P<ast::Item>>> {
+pub fn parse_item_from_source_str(name: String, source: String, sess: &ParseSess)
+                                      -> PResult<Option<P<ast::Item>>> {
     new_parser_from_source_str(sess, name, source).parse_item()
 }
 
-pub fn parse_meta_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
-                                      -> PResult<'a, ast::MetaItem> {
+pub fn parse_meta_from_source_str(name: String, source: String, sess: &ParseSess)
+                                      -> PResult<ast::MetaItem> {
     new_parser_from_source_str(sess, name, source).parse_meta_item()
 }
 
-pub fn parse_stmt_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
-                                      -> PResult<'a, Option<ast::Stmt>> {
+pub fn parse_stmt_from_source_str(name: String, source: String, sess: &ParseSess)
+                                      -> PResult<Option<ast::Stmt>> {
     new_parser_from_source_str(sess, name, source).parse_stmt()
 }
 
-pub fn parse_stream_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
+pub fn parse_stream_from_source_str(name: String, source: String, sess: &ParseSess)
                                         -> TokenStream {
     filemap_to_stream(sess, sess.codemap().new_filemap(name, source))
 }
 
 // Create a new parser from a source string
-pub fn new_parser_from_source_str<'a>(sess: &'a ParseSess, name: String, source: String)
-                                      -> Parser<'a> {
+pub fn new_parser_from_source_str(sess: &ParseSess, name: String, source: String)
+                                      -> Parser {
     filemap_to_parser(sess, sess.codemap().new_filemap(name, source))
 }
 
@@ -173,7 +173,7 @@ pub fn new_sub_parser_from_file<'a>(sess: &'a ParseSess,
 }
 
 /// Given a filemap and config, return a parser
-pub fn filemap_to_parser<'a>(sess: &'a ParseSess, filemap: Rc<FileMap>, ) -> Parser<'a> {
+pub fn filemap_to_parser(sess: & ParseSess, filemap: Rc<FileMap>, ) -> Parser {
     let end_pos = filemap.end_pos;
     let mut parser = stream_to_parser(sess, filemap_to_stream(sess, filemap));
 
@@ -186,7 +186,7 @@ pub fn filemap_to_parser<'a>(sess: &'a ParseSess, filemap: Rc<FileMap>, ) -> Par
 
 // must preserve old name for now, because quote! from the *existing*
 // compiler expands into it
-pub fn new_parser_from_tts<'a>(sess: &'a ParseSess, tts: Vec<TokenTree>) -> Parser<'a> {
+pub fn new_parser_from_tts(sess: &ParseSess, tts: Vec<TokenTree>) -> Parser {
     stream_to_parser(sess, tts.into_iter().collect())
 }
 
@@ -216,8 +216,8 @@ pub fn filemap_to_stream(sess: &ParseSess, filemap: Rc<FileMap>) -> TokenStream
     panictry!(srdr.parse_all_token_trees())
 }
 
-/// Given stream and the ParseSess, produce a parser
-pub fn stream_to_parser<'a>(sess: &'a ParseSess, stream: TokenStream) -> Parser<'a> {
+/// Given stream and the `ParseSess`, produce a parser
+pub fn stream_to_parser(sess: &ParseSess, stream: TokenStream) -> Parser {
     Parser::new(sess, stream, None, false)
 }
 
@@ -251,7 +251,7 @@ pub fn char_lit(lit: &str) -> (char, isize) {
             (c, 4)
         }
         'u' => {
-            assert!(lit.as_bytes()[2] == b'{');
+            assert_eq!(lit.as_bytes()[2], b'{');
             let idx = lit.find('}').unwrap();
             let v = u32::from_str_radix(&lit[3..idx], 16).unwrap();
             let c = char::from_u32(v).unwrap();
@@ -287,51 +287,46 @@ fn eat<'a>(it: &mut iter::Peekable<str::CharIndices<'a>>) {
     }
 
     let mut chars = lit.char_indices().peekable();
-    loop {
-        match chars.next() {
-            Some((i, c)) => {
-                match c {
-                    '\\' => {
-                        let ch = chars.peek().unwrap_or_else(|| {
-                            panic!("{}", error(i))
-                        }).1;
-
-                        if ch == '\n' {
-                            eat(&mut chars);
-                        } else if ch == '\r' {
-                            chars.next();
-                            let ch = chars.peek().unwrap_or_else(|| {
-                                panic!("{}", error(i))
-                            }).1;
-
-                            if ch != '\n' {
-                                panic!("lexer accepted bare CR");
-                            }
-                            eat(&mut chars);
-                        } else {
-                            // otherwise, a normal escape
-                            let (c, n) = char_lit(&lit[i..]);
-                            for _ in 0..n - 1 { // we don't need to move past the first \
-                                chars.next();
-                            }
-                            res.push(c);
-                        }
-                    },
-                    '\r' => {
-                        let ch = chars.peek().unwrap_or_else(|| {
-                            panic!("{}", error(i))
-                        }).1;
+    while let Some((i, c)) = chars.next() {
+        match c {
+            '\\' => {
+                let ch = chars.peek().unwrap_or_else(|| {
+                    panic!("{}", error(i))
+                }).1;
+
+                if ch == '\n' {
+                    eat(&mut chars);
+                } else if ch == '\r' {
+                    chars.next();
+                    let ch = chars.peek().unwrap_or_else(|| {
+                        panic!("{}", error(i))
+                    }).1;
 
-                        if ch != '\n' {
-                            panic!("lexer accepted bare CR");
-                        }
+                    if ch != '\n' {
+                        panic!("lexer accepted bare CR");
+                    }
+                    eat(&mut chars);
+                } else {
+                    // otherwise, a normal escape
+                    let (c, n) = char_lit(&lit[i..]);
+                    for _ in 0..n - 1 { // we don't need to move past the first \
                         chars.next();
-                        res.push('\n');
                     }
-                    c => res.push(c),
+                    res.push(c);
                 }
             },
-            None => break
+            '\r' => {
+                let ch = chars.peek().unwrap_or_else(|| {
+                    panic!("{}", error(i))
+                }).1;
+
+                if ch != '\n' {
+                    panic!("lexer accepted bare CR");
+                }
+                chars.next();
+                res.push('\n');
+            }
+            c => res.push(c),
         }
     }
 
@@ -346,22 +341,16 @@ pub fn raw_str_lit(lit: &str) -> String {
     debug!("raw_str_lit: given {}", escape_default(lit));
     let mut res = String::with_capacity(lit.len());
 
-    // FIXME #8372: This could be a for-loop if it didn't borrow the iterator
     let mut chars = lit.chars().peekable();
-    loop {
-        match chars.next() {
-            Some(c) => {
-                if c == '\r' {
-                    if *chars.peek().unwrap() != '\n' {
-                        panic!("lexer accepted bare CR");
-                    }
-                    chars.next();
-                    res.push('\n');
-                } else {
-                    res.push(c);
-                }
-            },
-            None => break
+    while let Some(c) = chars.next() {
+        if c == '\r' {
+            if *chars.peek().unwrap() != '\n' {
+                panic!("lexer accepted bare CR");
+            }
+            chars.next();
+            res.push('\n');
+        } else {
+            res.push(c);
         }
     }
 
@@ -459,7 +448,7 @@ pub fn byte_lit(lit: &str) -> (u8, usize) {
     if lit.len() == 1 {
         (lit.as_bytes()[0], 1)
     } else {
-        assert!(lit.as_bytes()[0] == b'\\', err(0));
+        assert_eq!(lit.as_bytes()[0], b'\\', "{}", err(0));
         let b = match lit.as_bytes()[1] {
             b'"' => b'"',
             b'n' => b'\n',
@@ -480,7 +469,7 @@ pub fn byte_lit(lit: &str) -> (u8, usize) {
                 }
             }
         };
-        return (b, 2);
+        (b, 2)
     }
 }
 
@@ -491,7 +480,7 @@ pub fn byte_str_lit(lit: &str) -> Rc<Vec<u8>> {
     let error = |i| format!("lexer should have rejected {} at {}", lit, i);
 
     /// Eat everything up to a non-whitespace
-    fn eat<'a, I: Iterator<Item=(usize, u8)>>(it: &mut iter::Peekable<I>) {
+    fn eat<I: Iterator<Item=(usize, u8)>>(it: &mut iter::Peekable<I>) {
         loop {
             match it.peek().map(|x| x.1) {
                 Some(b' ') | Some(b'\n') | Some(b'\r') | Some(b'\t') => {
@@ -578,7 +567,7 @@ pub fn integer_lit(s: &str, suffix: Option<Symbol>, diag: Option<(Span, &Handler
             if let Some(err) = err {
                 err!(diag, |span, diag| diag.span_err(span, err));
             }
-            return filtered_float_lit(Symbol::intern(&s), Some(suf), diag)
+            return filtered_float_lit(Symbol::intern(s), Some(suf), diag)
         }
     }
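
The string-literal helpers above replace `loop { match it.next() { Some(..) => .., None => break } }` with `while let Some(..) = it.next()`. A minimal sketch of the same rewrite on a plain character iterator (the CR handling here is simplified, not the real unescaping):

```rust
fn normalize_newlines(s: &str) -> String {
    let mut out = String::with_capacity(s.len());
    let mut chars = s.chars().peekable();

    // `while let` ends the loop automatically when the iterator is exhausted.
    while let Some(c) = chars.next() {
        if c == '\r' {
            // Treat CR and CRLF alike: emit a single '\n'.
            if chars.peek() == Some(&'\n') {
                chars.next();
            }
            out.push('\n');
        } else {
            out.push(c);
        }
    }
    out
}

fn main() {
    assert_eq!(normalize_newlines("a\r\nb"), "a\nb");
}
```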
 
index d5baec675e44f2116ee1707c94315c7ba11ba2c7..078e86aa2941f36a8c28332986dcf3772177c15a 100644 (file)
@@ -59,7 +59,7 @@ fn report(&mut self,
 
         if !self.obsolete_set.contains(&kind) &&
             (error || self.sess.span_diagnostic.can_emit_warnings) {
-            err.note(&format!("{}", desc));
+            err.note(desc);
             self.obsolete_set.insert(kind);
         }
         err.emit();
index ca1351e3b4158c89793b06bd622f4e54f22aad99..4741f896d3cc0c8c6ce38501dd2e1cb1a7dcfe0e 100644 (file)
@@ -248,7 +248,7 @@ fn next(&mut self) -> TokenAndSpan {
     fn next_desugared(&mut self) -> TokenAndSpan {
         let (sp, name) = match self.next() {
             TokenAndSpan { sp, tok: token::DocComment(name) } => (sp, name),
-            tok @ _ => return tok,
+            tok => return tok,
         };
 
         let stripped = strip_doc_comment_decoration(&name.as_str());
@@ -354,7 +354,7 @@ pub enum Error {
 }
 
 impl Error {
-    pub fn span_err<'a>(self, sp: Span, handler: &'a errors::Handler) -> DiagnosticBuilder<'a> {
+    pub fn span_err(self, sp: Span, handler: &errors::Handler) -> DiagnosticBuilder {
         match self {
             Error::FileNotFoundForModule { ref mod_name,
                                            ref default_path,
@@ -478,9 +478,10 @@ pub fn new(sess: &'a ParseSess,
     }
 
     fn next_tok(&mut self) -> TokenAndSpan {
-        let mut next = match self.desugar_doc_comments {
-            true => self.token_cursor.next_desugared(),
-            false => self.token_cursor.next(),
+        let mut next = if self.desugar_doc_comments {
+            self.token_cursor.next_desugared()
+        } else {
+            self.token_cursor.next()
         };
         if next.sp == syntax_pos::DUMMY_SP {
             next.sp = self.prev_span;
@@ -551,7 +552,7 @@ fn tokens_to_string(tokens: &[TokenType]) -> String {
             // This might be a sign we need a connect method on Iterator.
             let b = i.next()
                      .map_or("".to_string(), |t| t.to_string());
-            i.enumerate().fold(b, |mut b, (i, ref a)| {
+            i.enumerate().fold(b, |mut b, (i, a)| {
                 if tokens.len() > 2 && i == tokens.len() - 2 {
                     b.push_str(", or ");
                 } else if tokens.len() == 2 && i == tokens.len() - 2 {
@@ -985,18 +986,15 @@ fn parse_seq_to_before_tokens<T, F, Fe>(&mut self,
                 token::CloseDelim(..) | token::Eof => break,
                 _ => {}
             };
-            match sep.sep {
-                Some(ref t) => {
-                    if first {
-                        first = false;
-                    } else {
-                        if let Err(e) = self.expect(t) {
-                            fe(e);
-                            break;
-                        }
+            if let Some(ref t) = sep.sep {
+                if first {
+                    first = false;
+                } else {
+                    if let Err(e) = self.expect(t) {
+                        fe(e);
+                        break;
                     }
                 }
-                _ => ()
             }
             if sep.trailing_sep_allowed && kets.iter().any(|k| self.check(k)) {
                 break;
@@ -1493,7 +1491,7 @@ fn maybe_recover_from_bad_type_plus(&mut self, allow_plus: bool, ty: &Ty) -> PRe
         let sum_span = ty.span.to(self.prev_span);
 
         let mut err = struct_span_err!(self.sess.span_diagnostic, sum_span, E0178,
-            "expected a path on the left-hand side of `+`, not `{}`", pprust::ty_to_string(&ty));
+            "expected a path on the left-hand side of `+`, not `{}`", pprust::ty_to_string(ty));
 
         match ty.node {
             TyKind::Rptr(ref lifetime, ref mut_ty) => {
@@ -1547,7 +1545,7 @@ pub fn parse_ptr(&mut self) -> PResult<'a, MutTy> {
 
     pub fn is_named_argument(&mut self) -> bool {
         let offset = match self.token {
-            token::BinOp(token::And) => 1,
+            token::BinOp(token::And) |
             token::AndAnd => 1,
             _ if self.token.is_keyword(keywords::Mut) => 1,
             _ => 0
@@ -3154,10 +3152,11 @@ pub fn parse_arm(&mut self) -> PResult<'a, Arm> {
 
         let attrs = self.parse_outer_attributes()?;
         let pats = self.parse_pats()?;
-        let mut guard = None;
-        if self.eat_keyword(keywords::If) {
-            guard = Some(self.parse_expr()?);
-        }
+        let guard = if self.eat_keyword(keywords::If) {
+            Some(self.parse_expr()?)
+        } else {
+            None
+        };
         self.expect(&token::FatArrow)?;
         let expr = self.parse_expr_res(RESTRICTION_STMT_EXPR, None)?;
 
@@ -3600,10 +3599,11 @@ fn parse_local(&mut self, attrs: ThinVec<Attribute>) -> PResult<'a, P<Local>> {
         let lo = self.span;
         let pat = self.parse_pat()?;
 
-        let mut ty = None;
-        if self.eat(&token::Colon) {
-            ty = Some(self.parse_ty()?);
-        }
+        let ty = if self.eat(&token::Colon) {
+            Some(self.parse_ty()?)
+        } else {
+            None
+        };
         let init = self.parse_initializer()?;
         Ok(P(ast::Local {
             ty: ty,
@@ -3929,7 +3929,7 @@ fn parse_stmt_without_recovery(&mut self,
                 },
                 None => {
                     let unused_attrs = |attrs: &[_], s: &mut Self| {
-                        if attrs.len() > 0 {
+                        if !attrs.is_empty() {
                             if s.prev_token_kind == PrevTokenKind::DocComment {
                                 s.span_fatal_err(s.prev_span, Error::UselessDocComment).emit();
                             } else {
@@ -4815,7 +4815,7 @@ fn parse_impl_method(&mut self, vis: &Visibility, at_end: &mut bool)
                 self.expect(&token::Not)?;
             }
 
-            self.complain_if_pub_macro(&vis, prev_span);
+            self.complain_if_pub_macro(vis, prev_span);
 
             // eat a matched-delimiter token tree:
             *at_end = true;
@@ -4917,13 +4917,10 @@ fn parse_item_impl(&mut self,
                 }
             }
         } else {
-            match polarity {
-                ast::ImplPolarity::Negative => {
-                    // This is a negated type implementation
-                    // `impl !MyType {}`, which is not allowed.
-                    self.span_err(neg_span, "inherent implementation can't be negated");
-                },
-                _ => {}
+            if polarity == ast::ImplPolarity::Negative {
+                // This is a negated type implementation
+                // `impl !MyType {}`, which is not allowed.
+                self.span_err(neg_span, "inherent implementation can't be negated");
             }
             None
         };
@@ -5185,7 +5182,7 @@ pub fn parse_visibility(&mut self, can_take_tuple: bool) -> PResult<'a, Visibili
                 let path_span = self.prev_span;
                 let help_msg = format!("make this visible only to module `{}` with `in`:", path);
                 self.expect(&token::CloseDelim(token::Paren))?;  // `)`
-                let mut err = self.span_fatal_help(path_span, &msg, &suggestion);
+                let mut err = self.span_fatal_help(path_span, msg, suggestion);
                 err.span_suggestion(path_span, &help_msg, format!("in {}", path));
                 err.emit();  // emit diagnostic, but continue with public visibility
             }
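
Several parser hunks above turn `let mut x = None; if cond { x = Some(..) }` into a single `let x = if cond { Some(..) } else { None };`, keeping the binding immutable. A small sketch with an invented `describe` function:

```rust
fn describe(n: i32) -> String {
    // Initializing through an `if` expression avoids the mutable two-step form.
    let label = if n > 4 { Some("big") } else { None };
    match label {
        Some(l) => format!("{} ({})", n, l),
        None => n.to_string(),
    }
}

fn main() {
    assert_eq!(describe(7), "7 (big)");
    assert_eq!(describe(2), "2");
}
```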
index 25cabef70c15b5765b89a916db34a457a62a6e95..77db604c56e118c0596364536b3b1ed3674a2f24 100644 (file)
@@ -53,6 +53,10 @@ impl DelimToken {
     pub fn len(self) -> usize {
         if self == NoDelim { 0 } else { 1 }
     }
+
+    pub fn is_empty(self) -> bool {
+        self == NoDelim
+    }
 }
 
 #[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug, Copy)]
@@ -198,17 +202,17 @@ pub fn is_like_gt(&self) -> bool {
     pub fn can_begin_expr(&self) -> bool {
         match *self {
             Ident(ident)                => ident_can_begin_expr(ident), // value name or keyword
-            OpenDelim(..)               => true, // tuple, array or block
-            Literal(..)                 => true, // literal
-            Not                         => true, // operator not
-            BinOp(Minus)                => true, // unary minus
-            BinOp(Star)                 => true, // dereference
-            BinOp(Or) | OrOr            => true, // closure
-            BinOp(And)                  => true, // reference
-            AndAnd                      => true, // double reference
-            DotDot | DotDotDot          => true, // range notation
-            Lt | BinOp(Shl)             => true, // associated path
-            ModSep                      => true, // global path
+            OpenDelim(..)               | // tuple, array or block
+            Literal(..)                 | // literal
+            Not                         | // operator not
+            BinOp(Minus)                | // unary minus
+            BinOp(Star)                 | // dereference
+            BinOp(Or) | OrOr            | // closure
+            BinOp(And)                  | // reference
+            AndAnd                      | // double reference
+            DotDot | DotDotDot          | // range notation
+            Lt | BinOp(Shl)             | // associated path
+            ModSep                      | // global path
             Pound                       => true, // expression attributes
             Interpolated(ref nt) => match **nt {
                 NtIdent(..) | NtExpr(..) | NtBlock(..) | NtPath(..) => true,
@@ -222,16 +226,16 @@ pub fn can_begin_expr(&self) -> bool {
     pub fn can_begin_type(&self) -> bool {
         match *self {
             Ident(ident)                => ident_can_begin_type(ident), // type name or keyword
-            OpenDelim(Paren)            => true, // tuple
-            OpenDelim(Bracket)          => true, // array
-            Underscore                  => true, // placeholder
-            Not                         => true, // never
-            BinOp(Star)                 => true, // raw pointer
-            BinOp(And)                  => true, // reference
-            AndAnd                      => true, // double reference
-            Question                    => true, // maybe bound in trait object
-            Lifetime(..)                => true, // lifetime bound in trait object
-            Lt | BinOp(Shl)             => true, // associated path
+            OpenDelim(Paren)            | // tuple
+            OpenDelim(Bracket)          | // array
+            Underscore                  | // placeholder
+            Not                         | // never
+            BinOp(Star)                 | // raw pointer
+            BinOp(And)                  | // reference
+            AndAnd                      | // double reference
+            Question                    | // maybe bound in trait object
+            Lifetime(..)                | // lifetime bound in trait object
+            Lt | BinOp(Shl)             | // associated path
             ModSep                      => true, // global path
             Interpolated(ref nt) => match **nt {
                 NtIdent(..) | NtTy(..) | NtPath(..) => true,
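
The token hunks above merge runs of match arms that all return `true` into one `|` pattern with a single arm. A standalone sketch on a made-up token enum:

```rust
enum Tok {
    OpenParen,
    OpenBracket,
    Minus,
    Ident,
    Eof,
}

fn can_begin_expr(t: &Tok) -> bool {
    // One `|` pattern replaces four separate `=> true` arms.
    match *t {
        Tok::OpenParen |
        Tok::OpenBracket |
        Tok::Minus |
        Tok::Ident => true,
        Tok::Eof => false,
    }
}

fn main() {
    assert!(can_begin_expr(&Tok::OpenParen));
    assert!(can_begin_expr(&Tok::OpenBracket));
    assert!(can_begin_expr(&Tok::Minus));
    assert!(can_begin_expr(&Tok::Ident));
    assert!(!can_begin_expr(&Tok::Eof));
}
```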
index 1d67c2a2c2b74023c8f1fa1c7c5e4dc4ca8d9e6b..e893c859247c6110790921f528c04c99f9403fae 100644 (file)
 //! between using 'left' and 'right' terms to denote the wrapped-to-ring-buffer
 //! and point-in-infinite-stream senses freely.
 //!
-//! There is a parallel ring buffer, 'size', that holds the calculated size of
+//! There is a parallel ring buffer, `size`, that holds the calculated size of
 //! each token. Why calculated? Because for Begin/End pairs, the "size"
 //! includes everything between the pair. That is, the "size" of Begin is
 //! actually the sum of the sizes of everything between Begin and the paired
-//! End that follows. Since that is arbitrarily far in the future, 'size' is
+//! End that follows. Since that is arbitrarily far in the future, `size` is
 //! being rewritten regularly while the printer runs; in fact most of the
-//! machinery is here to work out 'size' entries on the fly (and give up when
+//! machinery is here to work out `size` entries on the fly (and give up when
 //! they're so obviously over-long that "infinity" is a good enough
 //! approximation for purposes of line breaking).
 //!
 //! The "input side" of the printer is managed as an abstract process called
-//! SCAN, which uses 'scan_stack', to manage calculating 'size'. SCAN is, in
+//! SCAN, which uses `scan_stack`, to manage calculating `size`. SCAN is, in
 //! other words, the process of calculating 'size' entries.
 //!
 //! The "output side" of the printer is managed by an abstract process called
-//! PRINT, which uses 'print_stack', 'margin' and 'space' to figure out what to
+//! PRINT, which uses `print_stack`, `margin` and `space` to figure out what to
 //! do with each token/size pair it consumes as it goes. It's trying to consume
 //! the entire buffered window, but can't output anything until the size is >=
 //! 0 (sizes are set to negative while they're pending calculation).
@@ -409,7 +409,7 @@ pub fn scan_pop_bottom(&mut self) -> usize {
     pub fn advance_right(&mut self) {
         self.right += 1;
         self.right %= self.buf_len;
-        assert!(self.right != self.left);
+        assert_ne!(self.right, self.left);
     }
     pub fn advance_left(&mut self) -> io::Result<()> {
         debug!("advance_left Vec<{},{}>, sizeof({})={}", self.left, self.right,
index 0c7e8fda83766cbdbc8f4f0df87c1e42b5f9c523..83c289ff80b9251bd2df854ebed1207b7f5a09f0 100644 (file)
@@ -233,7 +233,7 @@ pub fn token_to_string(tok: &Token) -> String {
         token::CloseDelim(token::Bracket) => "]".to_string(),
         token::OpenDelim(token::Brace) => "{".to_string(),
         token::CloseDelim(token::Brace) => "}".to_string(),
-        token::OpenDelim(token::NoDelim) => " ".to_string(),
+        token::OpenDelim(token::NoDelim) |
         token::CloseDelim(token::NoDelim) => " ".to_string(),
         token::Pound                => "#".to_string(),
         token::Dollar               => "$".to_string(),
@@ -244,7 +244,7 @@ pub fn token_to_string(tok: &Token) -> String {
             let mut out = match lit {
                 token::Byte(b)           => format!("b'{}'", b),
                 token::Char(c)           => format!("'{}'", c),
-                token::Float(c)          => c.to_string(),
+                token::Float(c)          |
                 token::Integer(c)        => c.to_string(),
                 token::Str_(s)           => format!("\"{}\"", s),
                 token::StrRaw(s, n)      => format!("r{delim}\"{string}\"{delim}",
@@ -277,23 +277,23 @@ pub fn token_to_string(tok: &Token) -> String {
         token::Shebang(s)           => format!("/* shebang: {}*/", s),
 
         token::Interpolated(ref nt) => match **nt {
-            token::NtExpr(ref e)        => expr_to_string(&e),
-            token::NtMeta(ref e)        => meta_item_to_string(&e),
-            token::NtTy(ref e)          => ty_to_string(&e),
-            token::NtPath(ref e)        => path_to_string(&e),
-            token::NtItem(ref e)        => item_to_string(&e),
-            token::NtBlock(ref e)       => block_to_string(&e),
-            token::NtStmt(ref e)        => stmt_to_string(&e),
-            token::NtPat(ref e)         => pat_to_string(&e),
+            token::NtExpr(ref e)        => expr_to_string(e),
+            token::NtMeta(ref e)        => meta_item_to_string(e),
+            token::NtTy(ref e)          => ty_to_string(e),
+            token::NtPath(ref e)        => path_to_string(e),
+            token::NtItem(ref e)        => item_to_string(e),
+            token::NtBlock(ref e)       => block_to_string(e),
+            token::NtStmt(ref e)        => stmt_to_string(e),
+            token::NtPat(ref e)         => pat_to_string(e),
             token::NtIdent(ref e)       => ident_to_string(e.node),
             token::NtTT(ref tree)       => tt_to_string(tree.clone()),
-            token::NtArm(ref e)         => arm_to_string(&e),
-            token::NtImplItem(ref e)    => impl_item_to_string(&e),
-            token::NtTraitItem(ref e)   => trait_item_to_string(&e),
-            token::NtGenerics(ref e)    => generics_to_string(&e),
-            token::NtWhereClause(ref e) => where_clause_to_string(&e),
-            token::NtArg(ref e)         => arg_to_string(&e),
-            token::NtVis(ref e)         => vis_to_string(&e),
+            token::NtArm(ref e)         => arm_to_string(e),
+            token::NtImplItem(ref e)    => impl_item_to_string(e),
+            token::NtTraitItem(ref e)   => trait_item_to_string(e),
+            token::NtGenerics(ref e)    => generics_to_string(e),
+            token::NtWhereClause(ref e) => where_clause_to_string(e),
+            token::NtArg(ref e)         => arg_to_string(e),
+            token::NtVis(ref e)         => vis_to_string(e),
         }
     }
 }
@@ -520,8 +520,7 @@ fn next_lit(&mut self, pos: BytePos) -> Option<comments::Literal> {
 
         let mut result = None;
 
-        if let &Some(ref lits) = self.literals()
-        {
+        if let Some(ref lits) = *self.literals() {
             while cur_lit < lits.len() {
                 let ltrl = (*lits)[cur_lit].clone();
                 if ltrl.pos > pos { break; }
@@ -618,11 +617,8 @@ fn next_comment(&mut self) -> Option<comments::Comment> {
 
     fn print_literal(&mut self, lit: &ast::Lit) -> io::Result<()> {
         self.maybe_print_comment(lit.span.lo)?;
-        match self.next_lit(lit.span.lo) {
-            Some(ref ltrl) => {
-                return word(self.writer(), &(*ltrl).lit);
-            }
-            _ => ()
+        if let Some(ref ltrl) = self.next_lit(lit.span.lo) {
+            return word(self.writer(), &(*ltrl).lit);
         }
         match lit.node {
             ast::LitKind::Str(st, style) => self.print_string(&st.as_str(), style),
@@ -799,7 +795,7 @@ fn print_meta_item(&mut self, item: &ast::MetaItem) -> io::Result<()> {
                 self.popen()?;
                 self.commasep(Consistent,
                               &items[..],
-                              |s, i| s.print_meta_list_item(&i))?;
+                              |s, i| s.print_meta_list_item(i))?;
                 self.pclose()?;
             }
         }
@@ -982,14 +978,14 @@ pub fn commasep_cmnt<T, F, G>(&mut self,
 
     pub fn commasep_exprs(&mut self, b: Breaks,
                           exprs: &[P<ast::Expr>]) -> io::Result<()> {
-        self.commasep_cmnt(b, exprs, |s, e| s.print_expr(&e), |e| e.span)
+        self.commasep_cmnt(b, exprs, |s, e| s.print_expr(e), |e| e.span)
     }
 
     pub fn print_mod(&mut self, _mod: &ast::Mod,
                      attrs: &[ast::Attribute]) -> io::Result<()> {
         self.print_inner_attributes(attrs)?;
         for item in &_mod.items {
-            self.print_item(&item)?;
+            self.print_item(item)?;
         }
         Ok(())
     }
@@ -1018,7 +1014,7 @@ pub fn print_type(&mut self, ty: &ast::Ty) -> io::Result<()> {
         match ty.node {
             ast::TyKind::Slice(ref ty) => {
                 word(&mut self.s, "[")?;
-                self.print_type(&ty)?;
+                self.print_type(ty)?;
                 word(&mut self.s, "]")?;
             }
             ast::TyKind::Ptr(ref mt) => {
@@ -1040,7 +1036,7 @@ pub fn print_type(&mut self, ty: &ast::Ty) -> io::Result<()> {
             ast::TyKind::Tup(ref elts) => {
                 self.popen()?;
                 self.commasep(Inconsistent, &elts[..],
-                              |s, ty| s.print_type(&ty))?;
+                              |s, ty| s.print_type(ty))?;
                 if elts.len() == 1 {
                     word(&mut self.s, ",")?;
                 }
@@ -1048,7 +1044,7 @@ pub fn print_type(&mut self, ty: &ast::Ty) -> io::Result<()> {
             }
             ast::TyKind::Paren(ref typ) => {
                 self.popen()?;
-                self.print_type(&typ)?;
+                self.print_type(typ)?;
                 self.pclose()?;
             }
             ast::TyKind::BareFn(ref f) => {
@@ -1081,14 +1077,14 @@ pub fn print_type(&mut self, ty: &ast::Ty) -> io::Result<()> {
             }
             ast::TyKind::Array(ref ty, ref v) => {
                 word(&mut self.s, "[")?;
-                self.print_type(&ty)?;
+                self.print_type(ty)?;
                 word(&mut self.s, "; ")?;
-                self.print_expr(&v)?;
+                self.print_expr(v)?;
                 word(&mut self.s, "]")?;
             }
             ast::TyKind::Typeof(ref e) => {
                 word(&mut self.s, "typeof(")?;
-                self.print_expr(&e)?;
+                self.print_expr(e)?;
                 word(&mut self.s, ")")?;
             }
             ast::TyKind::Infer => {
@@ -1130,7 +1126,7 @@ pub fn print_foreign_item(&mut self,
                 }
                 self.print_ident(item.ident)?;
                 self.word_space(":")?;
-                self.print_type(&t)?;
+                self.print_type(t)?;
                 word(&mut self.s, ";")?;
                 self.end()?; // end the head-ibox
                 self.end() // end the outer cbox
@@ -1187,7 +1183,7 @@ pub fn print_item(&mut self, item: &ast::Item) -> io::Result<()> {
                 self.head(&visibility_qualified(&item.vis, "extern crate"))?;
                 if let Some(p) = *optional_path {
                     let val = p.as_str();
-                    if val.contains("-") {
+                    if val.contains('-') {
                         self.print_string(&val, ast::StrStyle::Cooked)?;
                     } else {
                         self.print_name(p)?;
@@ -1203,7 +1199,7 @@ pub fn print_item(&mut self, item: &ast::Item) -> io::Result<()> {
             }
             ast::ItemKind::Use(ref vp) => {
                 self.head(&visibility_qualified(&item.vis, "use"))?;
-                self.print_view_path(&vp)?;
+                self.print_view_path(vp)?;
                 word(&mut self.s, ";")?;
                 self.end()?; // end inner head-block
                 self.end()?; // end outer head-block
@@ -1215,12 +1211,12 @@ pub fn print_item(&mut self, item: &ast::Item) -> io::Result<()> {
                 }
                 self.print_ident(item.ident)?;
                 self.word_space(":")?;
-                self.print_type(&ty)?;
+                self.print_type(ty)?;
                 space(&mut self.s)?;
                 self.end()?; // end the head-ibox
 
                 self.word_space("=")?;
-                self.print_expr(&expr)?;
+                self.print_expr(expr)?;
                 word(&mut self.s, ";")?;
                 self.end()?; // end the outer cbox
             }
@@ -1228,12 +1224,12 @@ pub fn print_item(&mut self, item: &ast::Item) -> io::Result<()> {
                 self.head(&visibility_qualified(&item.vis, "const"))?;
                 self.print_ident(item.ident)?;
                 self.word_space(":")?;
-                self.print_type(&ty)?;
+                self.print_type(ty)?;
                 space(&mut self.s)?;
                 self.end()?; // end the head-ibox
 
                 self.word_space("=")?;
-                self.print_expr(&expr)?;
+                self.print_expr(expr)?;
                 word(&mut self.s, ";")?;
                 self.end()?; // end the outer cbox
             }
@@ -1249,7 +1245,7 @@ pub fn print_item(&mut self, item: &ast::Item) -> io::Result<()> {
                     &item.vis
                 )?;
                 word(&mut self.s, " ")?;
-                self.print_block_with_attrs(&body, &item.attrs)?;
+                self.print_block_with_attrs(body, &item.attrs)?;
             }
             ast::ItemKind::Mod(ref _mod) => {
                 self.head(&visibility_qualified(&item.vis, "mod"))?;
@@ -1282,7 +1278,7 @@ pub fn print_item(&mut self, item: &ast::Item) -> io::Result<()> {
                 self.print_where_clause(&params.where_clause)?;
                 space(&mut self.s)?;
                 self.word_space("=")?;
-                self.print_type(&ty)?;
+                self.print_type(ty)?;
                 word(&mut self.s, ";")?;
                 self.end()?; // end the outer ibox
             }
@@ -1297,11 +1293,11 @@ pub fn print_item(&mut self, item: &ast::Item) -> io::Result<()> {
             }
             ast::ItemKind::Struct(ref struct_def, ref generics) => {
                 self.head(&visibility_qualified(&item.vis, "struct"))?;
-                self.print_struct(&struct_def, generics, item.ident, item.span, true)?;
+                self.print_struct(struct_def, generics, item.ident, item.span, true)?;
             }
             ast::ItemKind::Union(ref struct_def, ref generics) => {
                 self.head(&visibility_qualified(&item.vis, "union"))?;
-                self.print_struct(&struct_def, generics, item.ident, item.span, true)?;
+                self.print_struct(struct_def, generics, item.ident, item.span, true)?;
             }
             ast::ItemKind::DefaultImpl(unsafety, ref trait_ref) => {
                 self.head("")?;
@@ -1333,11 +1329,8 @@ pub fn print_item(&mut self, item: &ast::Item) -> io::Result<()> {
                     space(&mut self.s)?;
                 }
 
-                match polarity {
-                    ast::ImplPolarity::Negative => {
-                        word(&mut self.s, "!")?;
-                    },
-                    _ => {}
+                if polarity == ast::ImplPolarity::Negative {
+                    word(&mut self.s, "!")?;
                 }
 
                 if let Some(ref t) = *opt_trait {
@@ -1346,7 +1339,7 @@ pub fn print_item(&mut self, item: &ast::Item) -> io::Result<()> {
                     self.word_space("for")?;
                 }
 
-                self.print_type(&ty)?;
+                self.print_type(ty)?;
                 self.print_where_clause(&generics.where_clause)?;
 
                 space(&mut self.s)?;
@@ -1543,7 +1536,7 @@ pub fn print_variant(&mut self, v: &ast::Variant) -> io::Result<()> {
             Some(ref d) => {
                 space(&mut self.s)?;
                 self.word_space("=")?;
-                self.print_expr(&d)
+                self.print_expr(d)
             }
             _ => Ok(())
         }
@@ -1571,7 +1564,7 @@ pub fn print_trait_item(&mut self, ti: &ast::TraitItem)
         self.print_outer_attributes(&ti.attrs)?;
         match ti.node {
             ast::TraitItemKind::Const(ref ty, ref default) => {
-                self.print_associated_const(ti.ident, &ty,
+                self.print_associated_const(ti.ident, ty,
                                             default.as_ref().map(|expr| &**expr),
                                             &ast::Visibility::Inherited)?;
             }
@@ -1614,7 +1607,7 @@ pub fn print_impl_item(&mut self, ii: &ast::ImplItem) -> io::Result<()> {
         self.print_defaultness(ii.defaultness)?;
         match ii.node {
             ast::ImplItemKind::Const(ref ty, ref expr) => {
-                self.print_associated_const(ii.ident, &ty, Some(&expr), &ii.vis)?;
+                self.print_associated_const(ii.ident, ty, Some(expr), &ii.vis)?;
             }
             ast::ImplItemKind::Method(ref sig, ref body) => {
                 self.head("")?;
@@ -1650,38 +1643,38 @@ pub fn print_stmt(&mut self, st: &ast::Stmt) -> io::Result<()> {
                 self.word_nbsp("let")?;
 
                 self.ibox(INDENT_UNIT)?;
-                self.print_local_decl(&loc)?;
+                self.print_local_decl(loc)?;
                 self.end()?;
                 if let Some(ref init) = loc.init {
                     self.nbsp()?;
                     self.word_space("=")?;
-                    self.print_expr(&init)?;
+                    self.print_expr(init)?;
                 }
                 word(&mut self.s, ";")?;
                 self.end()?;
             }
-            ast::StmtKind::Item(ref item) => self.print_item(&item)?,
+            ast::StmtKind::Item(ref item) => self.print_item(item)?,
             ast::StmtKind::Expr(ref expr) => {
                 self.space_if_not_bol()?;
-                self.print_expr_outer_attr_style(&expr, false)?;
+                self.print_expr_outer_attr_style(expr, false)?;
                 if parse::classify::expr_requires_semi_to_be_stmt(expr) {
                     word(&mut self.s, ";")?;
                 }
             }
             ast::StmtKind::Semi(ref expr) => {
                 self.space_if_not_bol()?;
-                self.print_expr_outer_attr_style(&expr, false)?;
+                self.print_expr_outer_attr_style(expr, false)?;
                 word(&mut self.s, ";")?;
             }
             ast::StmtKind::Mac(ref mac) => {
                 let (ref mac, style, ref attrs) = **mac;
                 self.space_if_not_bol()?;
-                self.print_outer_attributes(&attrs)?;
+                self.print_outer_attributes(attrs)?;
                 let delim = match style {
                     ast::MacStmtStyle::Braces => token::Brace,
                     _ => token::Paren
                 };
-                self.print_mac(&mac, delim)?;
+                self.print_mac(mac, delim)?;
                 if style == ast::MacStmtStyle::Semicolon {
                     word(&mut self.s, ";")?;
                 }
@@ -1735,7 +1728,7 @@ pub fn print_block_maybe_unclosed(&mut self,
                 ast::StmtKind::Expr(ref expr) if i == blk.stmts.len() - 1 => {
                     self.maybe_print_comment(st.span.lo)?;
                     self.space_if_not_bol()?;
-                    self.print_expr_outer_attr_style(&expr, false)?;
+                    self.print_expr_outer_attr_style(expr, false)?;
                     self.maybe_print_trailing_comment(expr.span, Some(blk.span.hi))?;
                 }
                 _ => self.print_stmt(st)?,
@@ -1755,9 +1748,9 @@ fn print_else(&mut self, els: Option<&ast::Expr>) -> io::Result<()> {
                         self.cbox(INDENT_UNIT - 1)?;
                         self.ibox(0)?;
                         word(&mut self.s, " else if ")?;
-                        self.print_expr(&i)?;
+                        self.print_expr(i)?;
                         space(&mut self.s)?;
-                        self.print_block(&then)?;
+                        self.print_block(then)?;
                         self.print_else(e.as_ref().map(|e| &**e))
                     }
                     // "another else-if-let"
@@ -1765,12 +1758,12 @@ fn print_else(&mut self, els: Option<&ast::Expr>) -> io::Result<()> {
                         self.cbox(INDENT_UNIT - 1)?;
                         self.ibox(0)?;
                         word(&mut self.s, " else if let ")?;
-                        self.print_pat(&pat)?;
+                        self.print_pat(pat)?;
                         space(&mut self.s)?;
                         self.word_space("=")?;
-                        self.print_expr(&expr)?;
+                        self.print_expr(expr)?;
                         space(&mut self.s)?;
-                        self.print_block(&then)?;
+                        self.print_block(then)?;
                         self.print_else(e.as_ref().map(|e| &**e))
                     }
                     // "final else"
@@ -1778,7 +1771,7 @@ fn print_else(&mut self, els: Option<&ast::Expr>) -> io::Result<()> {
                         self.cbox(INDENT_UNIT - 1)?;
                         self.ibox(0)?;
                         word(&mut self.s, " else ")?;
-                        self.print_block(&b)
+                        self.print_block(b)
                     }
                     // BLEAH, constraints would be great here
                     _ => {
@@ -1844,12 +1837,8 @@ pub fn check_expr_bin_needs_paren(&mut self, sub_expr: &ast::Expr,
                                       binop: ast::BinOp) -> bool {
         match sub_expr.node {
             ast::ExprKind::Binary(ref sub_op, _, _) => {
-                if AssocOp::from_ast_binop(sub_op.node).precedence() <
-                    AssocOp::from_ast_binop(binop.node).precedence() {
-                    true
-                } else {
-                    false
-                }
+                AssocOp::from_ast_binop(sub_op.node).precedence() <
+                    AssocOp::from_ast_binop(binop.node).precedence()
             }
             _ => true
         }
@@ -1929,7 +1918,7 @@ fn print_expr_struct(&mut self,
                     space(&mut self.s)?;
                 }
                 word(&mut self.s, "..")?;
-                self.print_expr(&expr)?;
+                self.print_expr(expr)?;
                 self.end()?;
             }
             _ => if !fields.is_empty() {
@@ -1969,7 +1958,7 @@ fn print_expr_method_call(&mut self,
         if !tys.is_empty() {
             word(&mut self.s, "::<")?;
             self.commasep(Inconsistent, tys,
-                          |s, ty| s.print_type(&ty))?;
+                          |s, ty| s.print_type(ty))?;
             word(&mut self.s, ">")?;
         }
         self.print_call_post(base_args)
@@ -2038,7 +2027,7 @@ fn print_expr_outer_attr_style(&mut self,
                 self.print_expr_vec(&exprs[..], attrs)?;
             }
             ast::ExprKind::Repeat(ref element, ref count) => {
-                self.print_expr_repeat(&element, &count, attrs)?;
+                self.print_expr_repeat(element, count, attrs)?;
             }
             ast::ExprKind::Struct(ref path, ref fields, ref wth) => {
                 self.print_expr_struct(path, &fields[..], wth, attrs)?;
@@ -2047,43 +2036,43 @@ fn print_expr_outer_attr_style(&mut self,
                 self.print_expr_tup(&exprs[..], attrs)?;
             }
             ast::ExprKind::Call(ref func, ref args) => {
-                self.print_expr_call(&func, &args[..])?;
+                self.print_expr_call(func, &args[..])?;
             }
             ast::ExprKind::MethodCall(ident, ref tys, ref args) => {
                 self.print_expr_method_call(ident, &tys[..], &args[..])?;
             }
             ast::ExprKind::Binary(op, ref lhs, ref rhs) => {
-                self.print_expr_binary(op, &lhs, &rhs)?;
+                self.print_expr_binary(op, lhs, rhs)?;
             }
             ast::ExprKind::Unary(op, ref expr) => {
-                self.print_expr_unary(op, &expr)?;
+                self.print_expr_unary(op, expr)?;
             }
             ast::ExprKind::AddrOf(m, ref expr) => {
-                self.print_expr_addr_of(m, &expr)?;
+                self.print_expr_addr_of(m, expr)?;
             }
             ast::ExprKind::Lit(ref lit) => {
-                self.print_literal(&lit)?;
+                self.print_literal(lit)?;
             }
             ast::ExprKind::Cast(ref expr, ref ty) => {
                 if let ast::ExprKind::Cast(..) = expr.node {
-                    self.print_expr(&expr)?;
+                    self.print_expr(expr)?;
                 } else {
-                    self.print_expr_maybe_paren(&expr)?;
+                    self.print_expr_maybe_paren(expr)?;
                 }
                 space(&mut self.s)?;
                 self.word_space("as")?;
-                self.print_type(&ty)?;
+                self.print_type(ty)?;
             }
             ast::ExprKind::Type(ref expr, ref ty) => {
-                self.print_expr(&expr)?;
+                self.print_expr(expr)?;
                 self.word_space(":")?;
-                self.print_type(&ty)?;
+                self.print_type(ty)?;
             }
             ast::ExprKind::If(ref test, ref blk, ref elseopt) => {
-                self.print_if(&test, &blk, elseopt.as_ref().map(|e| &**e))?;
+                self.print_if(test, blk, elseopt.as_ref().map(|e| &**e))?;
             }
             ast::ExprKind::IfLet(ref pat, ref expr, ref blk, ref elseopt) => {
-                self.print_if_let(&pat, &expr, &blk, elseopt.as_ref().map(|e| &**e))?;
+                self.print_if_let(pat, expr, blk, elseopt.as_ref().map(|e| &**e))?;
             }
             ast::ExprKind::While(ref test, ref blk, opt_ident) => {
                 if let Some(ident) = opt_ident {
@@ -2091,9 +2080,9 @@ fn print_expr_outer_attr_style(&mut self,
                     self.word_space(":")?;
                 }
                 self.head("while")?;
-                self.print_expr(&test)?;
+                self.print_expr(test)?;
                 space(&mut self.s)?;
-                self.print_block_with_attrs(&blk, attrs)?;
+                self.print_block_with_attrs(blk, attrs)?;
             }
             ast::ExprKind::WhileLet(ref pat, ref expr, ref blk, opt_ident) => {
                 if let Some(ident) = opt_ident {
@@ -2101,12 +2090,12 @@ fn print_expr_outer_attr_style(&mut self,
                     self.word_space(":")?;
                 }
                 self.head("while let")?;
-                self.print_pat(&pat)?;
+                self.print_pat(pat)?;
                 space(&mut self.s)?;
                 self.word_space("=")?;
-                self.print_expr(&expr)?;
+                self.print_expr(expr)?;
                 space(&mut self.s)?;
-                self.print_block_with_attrs(&blk, attrs)?;
+                self.print_block_with_attrs(blk, attrs)?;
             }
             ast::ExprKind::ForLoop(ref pat, ref iter, ref blk, opt_ident) => {
                 if let Some(ident) = opt_ident {
@@ -2114,12 +2103,12 @@ fn print_expr_outer_attr_style(&mut self,
                     self.word_space(":")?;
                 }
                 self.head("for")?;
-                self.print_pat(&pat)?;
+                self.print_pat(pat)?;
                 space(&mut self.s)?;
                 self.word_space("in")?;
-                self.print_expr(&iter)?;
+                self.print_expr(iter)?;
                 space(&mut self.s)?;
-                self.print_block_with_attrs(&blk, attrs)?;
+                self.print_block_with_attrs(blk, attrs)?;
             }
             ast::ExprKind::Loop(ref blk, opt_ident) => {
                 if let Some(ident) = opt_ident {
@@ -2128,13 +2117,13 @@ fn print_expr_outer_attr_style(&mut self,
                 }
                 self.head("loop")?;
                 space(&mut self.s)?;
-                self.print_block_with_attrs(&blk, attrs)?;
+                self.print_block_with_attrs(blk, attrs)?;
             }
             ast::ExprKind::Match(ref expr, ref arms) => {
                 self.cbox(INDENT_UNIT)?;
                 self.ibox(4)?;
                 self.word_nbsp("match")?;
-                self.print_expr(&expr)?;
+                self.print_expr(expr)?;
                 space(&mut self.s)?;
                 self.bopen()?;
                 self.print_inner_attributes_no_trailing_hardbreak(attrs)?;
@@ -2146,7 +2135,7 @@ fn print_expr_outer_attr_style(&mut self,
             ast::ExprKind::Closure(capture_clause, ref decl, ref body, _) => {
                 self.print_capture_clause(capture_clause)?;
 
-                self.print_fn_block_args(&decl)?;
+                self.print_fn_block_args(decl)?;
                 space(&mut self.s)?;
                 self.print_expr(body)?;
                 self.end()?; // need to close a box
@@ -2161,48 +2150,48 @@ fn print_expr_outer_attr_style(&mut self,
                 self.cbox(INDENT_UNIT)?;
                 // head-box, will be closed by print-block after {
                 self.ibox(0)?;
-                self.print_block_with_attrs(&blk, attrs)?;
+                self.print_block_with_attrs(blk, attrs)?;
             }
             ast::ExprKind::Assign(ref lhs, ref rhs) => {
-                self.print_expr(&lhs)?;
+                self.print_expr(lhs)?;
                 space(&mut self.s)?;
                 self.word_space("=")?;
-                self.print_expr(&rhs)?;
+                self.print_expr(rhs)?;
             }
             ast::ExprKind::AssignOp(op, ref lhs, ref rhs) => {
-                self.print_expr(&lhs)?;
+                self.print_expr(lhs)?;
                 space(&mut self.s)?;
                 word(&mut self.s, op.node.to_string())?;
                 self.word_space("=")?;
-                self.print_expr(&rhs)?;
+                self.print_expr(rhs)?;
             }
             ast::ExprKind::Field(ref expr, id) => {
-                self.print_expr(&expr)?;
+                self.print_expr(expr)?;
                 word(&mut self.s, ".")?;
                 self.print_ident(id.node)?;
             }
             ast::ExprKind::TupField(ref expr, id) => {
-                self.print_expr(&expr)?;
+                self.print_expr(expr)?;
                 word(&mut self.s, ".")?;
                 self.print_usize(id.node)?;
             }
             ast::ExprKind::Index(ref expr, ref index) => {
-                self.print_expr(&expr)?;
+                self.print_expr(expr)?;
                 word(&mut self.s, "[")?;
-                self.print_expr(&index)?;
+                self.print_expr(index)?;
                 word(&mut self.s, "]")?;
             }
             ast::ExprKind::Range(ref start, ref end, limits) => {
-                if let &Some(ref e) = start {
-                    self.print_expr(&e)?;
+                if let Some(ref e) = *start {
+                    self.print_expr(e)?;
                 }
                 if limits == ast::RangeLimits::HalfOpen {
                     word(&mut self.s, "..")?;
                 } else {
                     word(&mut self.s, "...")?;
                 }
-                if let &Some(ref e) = end {
-                    self.print_expr(&e)?;
+                if let Some(ref e) = *end {
+                    self.print_expr(e)?;
                 }
             }
             ast::ExprKind::Path(None, ref path) => {
@@ -2233,12 +2222,9 @@ fn print_expr_outer_attr_style(&mut self,
             }
             ast::ExprKind::Ret(ref result) => {
                 word(&mut self.s, "return")?;
-                match *result {
-                    Some(ref expr) => {
-                        word(&mut self.s, " ")?;
-                        self.print_expr(&expr)?;
-                    }
-                    _ => ()
+                if let Some(ref expr) = *result {
+                    word(&mut self.s, " ")?;
+                    self.print_expr(expr)?;
                 }
             }
             ast::ExprKind::InlineAsm(ref a) => {
@@ -2268,7 +2254,7 @@ fn print_expr_outer_attr_style(&mut self,
                 self.commasep(Inconsistent, &a.inputs, |s, &(co, ref o)| {
                     s.print_string(&co.as_str(), ast::StrStyle::Cooked)?;
                     s.popen()?;
-                    s.print_expr(&o)?;
+                    s.print_expr(o)?;
                     s.pclose()?;
                     Ok(())
                 })?;
@@ -2308,7 +2294,7 @@ fn print_expr_outer_attr_style(&mut self,
             ast::ExprKind::Paren(ref e) => {
                 self.popen()?;
                 self.print_inner_attributes_inline(attrs)?;
-                self.print_expr(&e)?;
+                self.print_expr(e)?;
                 self.pclose()?;
             },
             ast::ExprKind::Try(ref e) => {
@@ -2318,7 +2304,7 @@ fn print_expr_outer_attr_style(&mut self,
             ast::ExprKind::Catch(ref blk) => {
                 self.head("do catch")?;
                 space(&mut self.s)?;
-                self.print_block_with_attrs(&blk, attrs)?
+                self.print_block_with_attrs(blk, attrs)?
             }
         }
         self.ann.post(self, NodeExpr(expr))?;
@@ -2329,7 +2315,7 @@ pub fn print_local_decl(&mut self, loc: &ast::Local) -> io::Result<()> {
         self.print_pat(&loc.pat)?;
         if let Some(ref ty) = loc.ty {
             self.word_space(":")?;
-            self.print_type(&ty)?;
+            self.print_type(ty)?;
         }
         Ok(())
     }
@@ -2397,7 +2383,7 @@ fn print_qpath(&mut self,
             space(&mut self.s)?;
             self.word_space("as")?;
             let depth = path.segments.len() - qself.position;
-            self.print_path(&path, false, depth, false)?;
+            self.print_path(path, false, depth, false)?;
         }
         word(&mut self.s, ">")?;
         word(&mut self.s, "::")?;
@@ -2438,7 +2424,7 @@ fn print_path_parameters(&mut self,
                     self.commasep(
                         Inconsistent,
                         &data.types,
-                        |s, ty| s.print_type(&ty))?;
+                        |s, ty| s.print_type(ty))?;
                         comma = true;
                 }
 
@@ -2461,13 +2447,13 @@ fn print_path_parameters(&mut self,
                 self.commasep(
                     Inconsistent,
                     &data.inputs,
-                    |s, ty| s.print_type(&ty))?;
+                    |s, ty| s.print_type(ty))?;
                 word(&mut self.s, ")")?;
 
                 if let Some(ref ty) = data.output {
                     self.space_if_not_bol()?;
                     self.word_space("->")?;
-                    self.print_type(&ty)?;
+                    self.print_type(ty)?;
                 }
             }
         }
@@ -2496,24 +2482,24 @@ pub fn print_pat(&mut self, pat: &ast::Pat) -> io::Result<()> {
                 self.print_ident(path1.node)?;
                 if let Some(ref p) = *sub {
                     word(&mut self.s, "@")?;
-                    self.print_pat(&p)?;
+                    self.print_pat(p)?;
                 }
             }
             PatKind::TupleStruct(ref path, ref elts, ddpos) => {
                 self.print_path(path, true, 0, false)?;
                 self.popen()?;
                 if let Some(ddpos) = ddpos {
-                    self.commasep(Inconsistent, &elts[..ddpos], |s, p| s.print_pat(&p))?;
+                    self.commasep(Inconsistent, &elts[..ddpos], |s, p| s.print_pat(p))?;
                     if ddpos != 0 {
                         self.word_space(",")?;
                     }
                     word(&mut self.s, "..")?;
                     if ddpos != elts.len() {
                         word(&mut self.s, ",")?;
-                        self.commasep(Inconsistent, &elts[ddpos..], |s, p| s.print_pat(&p))?;
+                        self.commasep(Inconsistent, &elts[ddpos..], |s, p| s.print_pat(p))?;
                     }
                 } else {
-                    self.commasep(Inconsistent, &elts[..], |s, p| s.print_pat(&p))?;
+                    self.commasep(Inconsistent, &elts[..], |s, p| s.print_pat(p))?;
                 }
                 self.pclose()?;
             }
@@ -2549,17 +2535,17 @@ pub fn print_pat(&mut self, pat: &ast::Pat) -> io::Result<()> {
             PatKind::Tuple(ref elts, ddpos) => {
                 self.popen()?;
                 if let Some(ddpos) = ddpos {
-                    self.commasep(Inconsistent, &elts[..ddpos], |s, p| s.print_pat(&p))?;
+                    self.commasep(Inconsistent, &elts[..ddpos], |s, p| s.print_pat(p))?;
                     if ddpos != 0 {
                         self.word_space(",")?;
                     }
                     word(&mut self.s, "..")?;
                     if ddpos != elts.len() {
                         word(&mut self.s, ",")?;
-                        self.commasep(Inconsistent, &elts[ddpos..], |s, p| s.print_pat(&p))?;
+                        self.commasep(Inconsistent, &elts[ddpos..], |s, p| s.print_pat(p))?;
                     }
                 } else {
-                    self.commasep(Inconsistent, &elts[..], |s, p| s.print_pat(&p))?;
+                    self.commasep(Inconsistent, &elts[..], |s, p| s.print_pat(p))?;
                     if elts.len() == 1 {
                         word(&mut self.s, ",")?;
                     }
@@ -2568,41 +2554,41 @@ pub fn print_pat(&mut self, pat: &ast::Pat) -> io::Result<()> {
             }
             PatKind::Box(ref inner) => {
                 word(&mut self.s, "box ")?;
-                self.print_pat(&inner)?;
+                self.print_pat(inner)?;
             }
             PatKind::Ref(ref inner, mutbl) => {
                 word(&mut self.s, "&")?;
                 if mutbl == ast::Mutability::Mutable {
                     word(&mut self.s, "mut ")?;
                 }
-                self.print_pat(&inner)?;
+                self.print_pat(inner)?;
             }
             PatKind::Lit(ref e) => self.print_expr(&**e)?,
             PatKind::Range(ref begin, ref end, ref end_kind) => {
-                self.print_expr(&begin)?;
+                self.print_expr(begin)?;
                 space(&mut self.s)?;
                 match *end_kind {
                     RangeEnd::Included => word(&mut self.s, "...")?,
                     RangeEnd::Excluded => word(&mut self.s, "..")?,
                 }
-                self.print_expr(&end)?;
+                self.print_expr(end)?;
             }
             PatKind::Slice(ref before, ref slice, ref after) => {
                 word(&mut self.s, "[")?;
                 self.commasep(Inconsistent,
                                    &before[..],
-                                   |s, p| s.print_pat(&p))?;
+                                   |s, p| s.print_pat(p))?;
                 if let Some(ref p) = *slice {
                     if !before.is_empty() { self.word_space(",")?; }
                     if p.node != PatKind::Wild {
-                        self.print_pat(&p)?;
+                        self.print_pat(p)?;
                     }
                     word(&mut self.s, "..")?;
                     if !after.is_empty() { self.word_space(",")?; }
                 }
                 self.commasep(Inconsistent,
                                    &after[..],
-                                   |s, p| s.print_pat(&p))?;
+                                   |s, p| s.print_pat(p))?;
                 word(&mut self.s, "]")?;
             }
             PatKind::Mac(ref m) => self.print_mac(m, token::Paren)?,
@@ -2628,12 +2614,12 @@ fn print_arm(&mut self, arm: &ast::Arm) -> io::Result<()> {
                 space(&mut self.s)?;
                 self.word_space("|")?;
             }
-            self.print_pat(&p)?;
+            self.print_pat(p)?;
         }
         space(&mut self.s)?;
         if let Some(ref e) = arm.guard {
             self.word_space("if")?;
-            self.print_expr(&e)?;
+            self.print_expr(e)?;
             space(&mut self.s)?;
         }
         self.word_space("=>")?;
@@ -2641,7 +2627,7 @@ fn print_arm(&mut self, arm: &ast::Arm) -> io::Result<()> {
         match arm.body.node {
             ast::ExprKind::Block(ref blk) => {
                 // the block will close the pattern's ibox
-                self.print_block_unclosed_indent(&blk, INDENT_UNIT)?;
+                self.print_block_unclosed_indent(blk, INDENT_UNIT)?;
 
                 // If it is a user-provided unsafe block, print a comma after it
                 if let BlockCheckMode::Unsafe(ast::UserProvided) = blk.rules {
@@ -2673,7 +2659,7 @@ fn print_explicit_self(&mut self, explicit_self: &ast::ExplicitSelf) -> io::Resu
                 self.print_mutability(m)?;
                 word(&mut self.s, "self")?;
                 self.word_space(":")?;
-                self.print_type(&typ)
+                self.print_type(typ)
             }
         }
     }
@@ -2725,7 +2711,7 @@ pub fn print_fn_block_args(
         self.word_space("->")?;
         match decl.output {
             ast::FunctionRetTy::Ty(ref ty) => {
-                self.print_type(&ty)?;
+                self.print_type(ty)?;
                 self.maybe_print_comment(ty.span.lo)
             }
             ast::FunctionRetTy::Default(..) => unreachable!(),
@@ -2839,7 +2825,7 @@ pub fn print_ty_param(&mut self, param: &ast::TyParam) -> io::Result<()> {
             Some(ref default) => {
                 space(&mut self.s)?;
                 self.word_space("=")?;
-                self.print_type(&default)
+                self.print_type(default)
             }
             _ => Ok(())
         }
@@ -2865,7 +2851,7 @@ pub fn print_where_clause(&mut self, where_clause: &ast::WhereClause)
                                                                              ref bounds,
                                                                              ..}) => {
                     self.print_formal_lifetime_list(bound_lifetimes)?;
-                    self.print_type(&bounded_ty)?;
+                    self.print_type(bounded_ty)?;
                     self.print_bounds(":", bounds)?;
                 }
                 ast::WherePredicate::RegionPredicate(ast::WhereRegionPredicate{ref lifetime,
@@ -2977,7 +2963,7 @@ pub fn print_fn_output(&mut self, decl: &ast::FnDecl) -> io::Result<()> {
         match decl.output {
             ast::FunctionRetTy::Default(..) => unreachable!(),
             ast::FunctionRetTy::Ty(ref ty) =>
-                self.print_type(&ty)?
+                self.print_type(ty)?
         }
         self.end()?;
 
@@ -3044,14 +3030,9 @@ pub fn print_remaining_comments(&mut self) -> io::Result<()> {
         if self.next_comment().is_none() {
             hardbreak(&mut self.s)?;
         }
-        loop {
-            match self.next_comment() {
-                Some(ref cmnt) => {
-                    self.print_comment(cmnt)?;
-                    self.cur_cmnt_and_lit.cur_cmnt += 1;
-                }
-                _ => break
-            }
+        while let Some(ref cmnt) = self.next_comment() {
+            self.print_comment(cmnt)?;
+            self.cur_cmnt_and_lit.cur_cmnt += 1;
         }
         Ok(())
     }
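The `pprust.rs` hunks above are mechanical cleanups: dropping `&` borrows on bindings that are already references, collapsing a bool-valued `if`/`else` into the condition itself, turning one-armed `match`es into `if let`, and merging match arms with identical bodies. As a minimal standalone sketch (illustrative only, not code from this commit), this is why such borrows are redundant in ordinary Rust:

```
fn print_name(name: &str) {
    println!("{}", name);
}

fn main() {
    let names = vec![String::from("alpha"), String::from("beta")];
    for name in &names {
        // `name` already has type `&String`; writing `print_name(&name)` would
        // pass `&&String`, which still compiles via deref coercion but adds nothing.
        print_name(name);
    }
}
```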
index c7820a15fb3d23d9f8c0a6fb41c59e2e2af45012..8e257102e1c13367d281d01cce701e94bb0bacf5 100644 (file)
@@ -18,7 +18,7 @@
 use tokenstream::TokenStream;
 
 /// Craft a span that will be ignored by the stability lint's
-/// call to codemap's is_internal check.
+/// call to codemap's `is_internal` check.
 /// The expanded code uses the unstable `#[prelude_import]` attribute.
 fn ignored_span(sp: Span) -> Span {
     let mark = Mark::fresh();
@@ -49,7 +49,7 @@ pub fn maybe_inject_crates_ref(mut krate: ast::Crate, alt_std_name: Option<Strin
         None => return krate,
     };
 
-    let crate_name = Symbol::intern(&alt_std_name.unwrap_or(name.to_string()));
+    let crate_name = Symbol::intern(&alt_std_name.unwrap_or_else(|| name.to_string()));
 
     krate.module.items.insert(0, P(ast::Item {
         attrs: vec![attr::mk_attr_outer(DUMMY_SP,
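The `std_inject.rs` change swaps `unwrap_or` for `unwrap_or_else` so the fallback `String` is only allocated when it is actually needed. A hedged, self-contained sketch of the difference (illustrative names, not the compiler's own code):

```
fn main() {
    let alt_name: Option<String> = None;

    // `unwrap_or("std".to_string())` would build the fallback String even when
    // `alt_name` is `Some(_)`; `unwrap_or_else` defers the allocation to a closure
    // that only runs in the `None` case.
    let crate_name = alt_name.unwrap_or_else(|| "std".to_string());
    assert_eq!(crate_name, "std");
}
```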
index 91746a2edd9b2a62abc70b84d7647ce277b5174a..bb1a6ff65a596018a263bc7aa94e941e24da1b78 100644 (file)
@@ -106,9 +106,8 @@ fn fold_crate(&mut self, c: ast::Crate) -> ast::Crate {
         // Add a special __test module to the crate that will contain code
         // generated for the test harness
         let (mod_, reexport) = mk_test_module(&mut self.cx);
-        match reexport {
-            Some(re) => folded.module.items.push(re),
-            None => {}
+        if let Some(re) = reexport {
+            folded.module.items.push(re)
         }
         folded.module.items.push(mod_);
         folded
@@ -257,7 +256,7 @@ fn mk_reexport_mod(cx: &mut TestCtxt,
     let parent = if parent == ast::DUMMY_NODE_ID { ast::CRATE_NODE_ID } else { parent };
     cx.ext_cx.current_expansion.mark = cx.ext_cx.resolver.get_module_scope(parent);
     let it = cx.ext_cx.monotonic_expander().fold_item(P(ast::Item {
-        ident: sym.clone(),
+        ident: sym,
         attrs: Vec::new(),
         id: ast::DUMMY_NODE_ID,
         node: ast::ItemKind::Mod(reexport_mod),
@@ -308,7 +307,7 @@ fn generate_test_harness(sess: &ParseSess,
 }
 
 /// Craft a span that will be ignored by the stability lint's
-/// call to codemap's is_internal check.
+/// call to codemap's `is_internal` check.
 /// The expanded code calls some unstable functions in the test crate.
 fn ignored_span(cx: &TestCtxt, sp: Span) -> Span {
     Span { ctxt: cx.ctxt, ..sp }
@@ -354,7 +353,7 @@ fn has_test_signature(i: &ast::Item) -> HasTestSignature {
         }
     }
 
-    return has_test_attr && has_test_signature(i) == Yes;
+    has_test_attr && has_test_signature(i) == Yes
 }
 
 fn is_bench_fn(cx: &TestCtxt, i: &ast::Item) -> bool {
@@ -385,7 +384,7 @@ fn has_test_signature(i: &ast::Item) -> bool {
                       `fn(&mut Bencher) -> ()`");
     }
 
-    return has_bench_attr && has_test_signature(i);
+    has_bench_attr && has_test_signature(i)
 }
 
 fn is_ignored(i: &ast::Item) -> bool {
@@ -504,16 +503,14 @@ fn mk_main(cx: &mut TestCtxt) -> P<ast::Item> {
                            ast::Unsafety::Normal,
                            dummy_spanned(ast::Constness::NotConst),
                            ::abi::Abi::Rust, ast::Generics::default(), main_body);
-    let main = P(ast::Item {
+    P(ast::Item {
         ident: Ident::from_str("main"),
         attrs: vec![main_attr],
         id: ast::DUMMY_NODE_ID,
         node: main,
         vis: ast::Visibility::Public,
         span: sp
-    });
-
-    return main;
+    })
 }
 
 fn mk_test_module(cx: &mut TestCtxt) -> (P<ast::Item>, Option<P<ast::Item>>) {
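The `test.rs` hunks follow the same pattern: a one-armed `match` becomes `if let`, trailing `return` statements give way to tail expressions, and a needless `clone()` of a `Copy` ident is dropped. A small sketch of those idioms under assumed, illustrative names:

```
fn make_greeting(name: Option<&str>) -> String {
    let mut out = String::from("hello");
    // One-armed `match name { Some(n) => ..., None => {} }` reads better as `if let`.
    if let Some(name) = name {
        out.push(' ');
        out.push_str(name);
    }
    // The final expression is returned implicitly; no trailing `return out;` needed.
    out
}

fn main() {
    assert_eq!(make_greeting(Some("world")), "hello world");
    assert_eq!(make_greeting(None), "hello");
}
```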
index 86bfdebe42b0082e206b6a836b66916ce4061846..9c1371a31fec7a9287f9f874c5d59defe042056e 100644 (file)
 
 //! # Token Streams
 //!
-//! TokenStreams represent syntactic objects before they are converted into ASTs.
+//! `TokenStream`s represent syntactic objects before they are converted into ASTs.
 //! A `TokenStream` is, roughly speaking, a sequence (eg stream) of `TokenTree`s,
 //! which are themselves a single `Token` or a `Delimited` subsequence of tokens.
 //!
 //! ## Ownership
-//! TokenStreams are persistent data structures constructed as ropes with reference
-//! counted-children. In general, this means that calling an operation on a TokenStream
-//! (such as `slice`) produces an entirely new TokenStream from the borrowed reference to
-//! the original. This essentially coerces TokenStreams into 'views' of their subparts,
-//! and a borrowed TokenStream is sufficient to build an owned TokenStream without taking
+//! `TokenStreams` are persistent data structures constructed as ropes with reference
+//! counted-children. In general, this means that calling an operation on a `TokenStream`
+//! (such as `slice`) produces an entirely new `TokenStream` from the borrowed reference to
+//! the original. This essentially coerces `TokenStream`s into 'views' of their subparts,
+//! and a borrowed `TokenStream` is sufficient to build an owned `TokenStream` without taking
 //! ownership of the original.
 
 use syntax_pos::{BytePos, Span, DUMMY_SP};
@@ -88,7 +88,7 @@ pub fn stream(&self) -> TokenStream {
 /// If the syntax extension is an MBE macro, it will attempt to match its
 /// LHS token tree against the provided token tree, and if it finds a
 /// match, will transcribe the RHS token tree, splicing in any captured
-/// macro_parser::matched_nonterminals into the `SubstNt`s it finds.
+/// `macro_parser::matched_nonterminals` into the `SubstNt`s it finds.
 ///
 /// The RHS of an MBE macro is the only place `SubstNt`s are substituted.
 /// Nothing special happens to misnamed or misplaced `SubstNt`s.
index a6fff2d707469ffd2d4d1607dfb658b62b4a1139..9307f3c58d4b0eb5d6942e9e8e6ee5ac23769688 100644 (file)
@@ -53,9 +53,10 @@ pub fn find_best_match_for_name<'a, T>(iter_names: T,
     iter_names
     .filter_map(|&name| {
         let dist = lev_distance(lookup, &name.as_str());
-        match dist <= max_dist {    // filter the unwanted cases
-            true => Some((name, dist)),
-            false => None,
+        if dist <= max_dist {    // filter the unwanted cases
+            Some((name, dist))
+        } else {
+            None
         }
     })
     .min_by_key(|&(_, val)| val)    // extract the tuple containing the minimum edit distance
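In `lev_distance.rs` the `match` on a boolean becomes a plain `if`/`else` inside the `filter_map`. Below is a hedged, standalone sketch of the same suggestion idiom with an independent, deliberately simple edit-distance function; it is not the module's actual implementation:

```
// Plain dynamic-programming Levenshtein distance, for illustration only.
fn lev(a: &str, b: &str) -> usize {
    let b_chars: Vec<char> = b.chars().collect();
    let mut prev: Vec<usize> = (0..=b_chars.len()).collect();
    for (i, ca) in a.chars().enumerate() {
        let mut cur = vec![i + 1];
        for (j, &cb) in b_chars.iter().enumerate() {
            let cost = if ca == cb { 0 } else { 1 };
            cur.push((prev[j] + cost).min(prev[j + 1] + 1).min(cur[j] + 1));
        }
        prev = cur;
    }
    *prev.last().unwrap()
}

fn best_match<'a>(lookup: &str, names: &[&'a str], max_dist: usize) -> Option<&'a str> {
    names
        .iter()
        .filter_map(|&name| {
            let dist = lev(lookup, name);
            // `if`/`else` instead of `match` on a bool, as in the hunk above.
            if dist <= max_dist { Some((name, dist)) } else { None }
        })
        .min_by_key(|&(_, dist)| dist)
        .map(|(name, _)| name)
}

fn main() {
    let names = ["print", "eprintln", "sprint"];
    assert_eq!(best_match("printt", &names, 2), Some("print"));
}
```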
index fe05e2958b3a851dd1e6828a99560cf07b3e082a..8cc37afa354ffbeed217349ae7d96cf01f1f0f28 100644 (file)
@@ -37,10 +37,10 @@ fn move_flat_map<F, I>(mut self, mut f: F) -> Self
                 // move the read_i'th item out of the vector and map it
                 // to an iterator
                 let e = ptr::read(self.get_unchecked(read_i));
-                let mut iter = f(e).into_iter();
+                let iter = f(e).into_iter();
                 read_i += 1;
 
-                while let Some(e) = iter.next() {
+                for e in iter {
                     if write_i < read_i {
                         ptr::write(self.get_unchecked_mut(write_i), e);
                         write_i += 1;
@@ -93,10 +93,10 @@ fn move_flat_map<F, I>(mut self, mut f: F) -> Self
                 // move the read_i'th item out of the vector and map it
                 // to an iterator
                 let e = ptr::read(self.get_unchecked(read_i));
-                let mut iter = f(e).into_iter();
+                let iter = f(e).into_iter();
                 read_i += 1;
 
-                while let Some(e) = iter.next() {
+                for e in iter {
                     if write_i < read_i {
                         ptr::write(self.get_unchecked_mut(write_i), e);
                         write_i += 1;
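The `move_map.rs` change replaces a `while let Some(e) = iter.next()` loop with `for e in iter`, which consumes the iterator the same way but no longer needs the binding to be `mut`. A tiny illustrative sketch (unrelated data, same idiom):

```
fn main() {
    let sentences = vec!["alpha beta", "gamma"];

    let mut flat = Vec::new();
    for sentence in &sentences {
        // `for` takes the iterator by value and drives it to completion, so it is
        // equivalent to `while let Some(word) = iter.next()` without the `mut`.
        let iter = sentence.split_whitespace();
        for word in iter {
            flat.push(word);
        }
    }
    assert_eq!(flat, ["alpha", "beta", "gamma"]);
}
```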
index 9288d95009c1de0822b4f3378ce9dd796cc8509e..0b484a4e0af5b5fa21b17ee21f17a68db79c40d8 100644 (file)
@@ -345,9 +345,7 @@ pub fn walk_ty<'a, V: Visitor<'a>>(visitor: &mut V, typ: &'a Ty) {
             visitor.visit_ty(ty);
             visitor.visit_expr(expression)
         }
-        TyKind::TraitObject(ref bounds) => {
-            walk_list!(visitor, visit_ty_param_bound, bounds);
-        }
+        TyKind::TraitObject(ref bounds) |
         TyKind::ImplTrait(ref bounds) => {
             walk_list!(visitor, visit_ty_param_bound, bounds);
         }
@@ -542,7 +540,7 @@ pub fn walk_fn<'a, V>(visitor: &mut V, kind: FnKind<'a>, declaration: &'a FnDecl
             walk_fn_decl(visitor, declaration);
             visitor.visit_block(body);
         }
-        FnKind::Method(_, ref sig, _, body) => {
+        FnKind::Method(_, sig, _, body) => {
             visitor.visit_generics(&sig.generics);
             walk_fn_decl(visitor, declaration);
             visitor.visit_block(body);
@@ -778,7 +776,7 @@ pub fn walk_expr<'a, V: Visitor<'a>>(visitor: &mut V, expression: &'a Expr) {
         }
         ExprKind::InlineAsm(ref ia) => {
             for &(_, ref input) in &ia.inputs {
-                visitor.visit_expr(&input)
+                visitor.visit_expr(input)
             }
             for output in &ia.outputs {
                 visitor.visit_expr(&output.expr)
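The `visit.rs` hunks merge match arms whose bodies were identical (`TraitObject` and `ImplTrait`) into a single `|` pattern, and drop a `ref` on a field that is already a reference. The `|`-arm idiom, as a standalone hedged sketch:

```
enum Shape {
    Circle(f64),
    Square(f64),
    Rect(f64, f64),
}

fn widest(shape: &Shape) -> f64 {
    match *shape {
        // Arms with identical bodies collapse into one `|` pattern,
        // just like `TyKind::TraitObject | TyKind::ImplTrait` above.
        Shape::Circle(side) |
        Shape::Square(side) => side,
        Shape::Rect(w, h) => w.max(h),
    }
}

fn main() {
    assert_eq!(widest(&Shape::Circle(1.0)), 1.0);
    assert_eq!(widest(&Shape::Square(2.0)), 2.0);
    assert_eq!(widest(&Shape::Rect(2.0, 3.0)), 3.0);
}
```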
diff --git a/src/test/compile-fail/method-help-unsatisfied-bound.rs b/src/test/compile-fail/method-help-unsatisfied-bound.rs
deleted file mode 100644 (file)
index 6416d54..0000000
+++ /dev/null
@@ -1,18 +0,0 @@
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-struct Foo;
-
-fn main() {
-    let a: Result<(), Foo> = Ok(());
-    a.unwrap();
-    //~^ ERROR no method named `unwrap` found for type `std::result::Result<(), Foo>`
-    //~| NOTE the following trait bounds were not satisfied: `Foo : std::fmt::Debug`
-}
diff --git a/src/test/mir-opt/issue-41697.rs b/src/test/mir-opt/issue-41697.rs
new file mode 100644 (file)
index 0000000..47eeffe
--- /dev/null
@@ -0,0 +1,48 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Regression test for #41697. Using dump-mir was triggering
+// artificial cycles: during type-checking, we had to get the MIR for
+// the constant expressions in `[u8; 2]`, which in turn would trigger
+// an attempt to get the item-path, which in turn would request the
+// types of the impl, which would trigger a cycle. We suppressed this

+// cycle now by forcing mir-dump to avoid asking for types of an impl.
+
+#![feature(rustc_attrs)]
+
+use std::sync::Arc;
+
+trait Foo {
+    fn get(&self) -> [u8; 2];
+}
+
+impl Foo for [u8; 2] {
+    fn get(&self) -> [u8; 2] {
+        *self
+    }
+}
+
+struct Bar<T: ?Sized>(T);
+
+fn unsize_fat_ptr<'a>(x: &'a Bar<Foo + Send + 'a>) -> &'a Bar<Foo + 'a> {
+    x
+}
+
+fn unsize_nested_fat_ptr(x: Arc<Foo + Send>) -> Arc<Foo> {
+    x
+}
+
+fn main() {
+    let x: Box<Bar<Foo + Send>> = Box::new(Bar([1,2]));
+    assert_eq!(unsize_fat_ptr(&*x).0.get(), [1, 2]);
+
+    let x: Arc<Foo + Send> = Arc::new([3, 4]);
+    assert_eq!(unsize_nested_fat_ptr(x).get(), [3, 4]);
+}
diff --git a/src/test/run-pass/issue-41697.rs b/src/test/run-pass/issue-41697.rs
deleted file mode 100644 (file)
index d59b6a1..0000000
+++ /dev/null
@@ -1,50 +0,0 @@
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// compile-flags:-Zdump-mir=NEVER_MATCHED
-
-// Regression test for #41697. Using dump-mir was triggering
-// artificial cycles: during type-checking, we had to get the MIR for
-// the constant expressions in `[u8; 2]`, which in turn would trigger
-// an attempt to get the item-path, which in turn would request the
-// types of the impl, which would trigger a cycle. We supressed this
-// cycle now by forcing mir-dump to avoid asking for types of an impl.
-
-#![feature(rustc_attrs)]
-
-use std::sync::Arc;
-
-trait Foo {
-    fn get(&self) -> [u8; 2];
-}
-
-impl Foo for [u8; 2] {
-    fn get(&self) -> [u8; 2] {
-        *self
-    }
-}
-
-struct Bar<T: ?Sized>(T);
-
-fn unsize_fat_ptr<'a>(x: &'a Bar<Foo + Send + 'a>) -> &'a Bar<Foo + 'a> {
-    x
-}
-
-fn unsize_nested_fat_ptr(x: Arc<Foo + Send>) -> Arc<Foo> {
-    x
-}
-
-fn main() {
-    let x: Box<Bar<Foo + Send>> = Box::new(Bar([1,2]));
-    assert_eq!(unsize_fat_ptr(&*x).0.get(), [1, 2]);
-
-    let x: Arc<Foo + Send> = Arc::new([3, 4]);
-    assert_eq!(unsize_nested_fat_ptr(x).get(), [3, 4]);
-}
diff --git a/src/test/run-pass/issue-41803.rs b/src/test/run-pass/issue-41803.rs
new file mode 100644 (file)
index 0000000..e18b420
--- /dev/null
@@ -0,0 +1,30 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+/// A compile-time map from identifiers to arbitrary (heterogeneous) expressions
+macro_rules! ident_map {
+    ( $name:ident = { $($key:ident => $e:expr,)* } ) => {
+        macro_rules! $name {
+            $(
+                ( $key ) => { $e };
+            )*
+            // Empty invocation expands to nothing. Needed when the map is empty.
+            () => {};
+        }
+    };
+}
+
+ident_map!(my_map = {
+    main => 0,
+});
+
+fn main() {
+    my_map!(main);
+}
diff --git a/src/test/run-pass/issue-41936-variance-coerce-unsized-cycle.rs b/src/test/run-pass/issue-41936-variance-coerce-unsized-cycle.rs
new file mode 100644 (file)
index 0000000..bfbead8
--- /dev/null
@@ -0,0 +1,38 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Regression test for #41936. The coerce-unsized trait check in
+// coherence was using subtyping, which triggered variance
+// computation, which failed because it required type info for fields
+// that had not (yet) been computed.
+
+#![feature(unsize)]
+#![feature(coerce_unsized)]
+
+use std::{marker,ops};
+
+// Change the array to a non-array, and error disappears
+// Adding a new field to the end keeps the error
+struct LogDataBuf([u8;8]);
+
+struct Aref<T: ?Sized>
+{
+    // Inner structure triggers the error, removing the inner removes the message.
+    ptr: Box<ArefInner<T>>,
+}
+impl<T: ?Sized + marker::Unsize<U>, U: ?Sized> ops::CoerceUnsized<Aref<U>> for Aref<T> {}
+
+struct ArefInner<T: ?Sized>
+{
+    // Even with this field commented out, the error is raised.
+    data: T,
+}
+
+fn main(){}
index 0e78746704fb150a838c7a885902e729a947d7f1..5c64b4118c3ab1a748e2175b3f74c82baa123efc 100644 (file)
@@ -18,9 +18,9 @@ impl Foo {
     pub fn rust0() {}
     // @has - '//code' 'fn rust1()'
     pub extern "Rust" fn rust1() {}
-    // @has - '//code' 'extern fn c0()'
+    // @has - '//code' 'extern "C" fn c0()'
     pub extern fn c0() {}
-    // @has - '//code' 'extern fn c1()'
+    // @has - '//code' 'extern "C" fn c1()'
     pub extern "C" fn c1() {}
     // @has - '//code' 'extern "system" fn system0()'
     pub extern "system" fn system0() {}
@@ -31,7 +31,7 @@ pub trait Bar {}
 
 // @has - '//code' 'impl Bar for fn()'
 impl Bar for fn() {}
-// @has - '//code' 'impl Bar for extern fn()'
+// @has - '//code' 'impl Bar for extern "C" fn()'
 impl Bar for extern fn() {}
 // @has - '//code' 'impl Bar for extern "system" fn()'
 impl Bar for extern "system" fn() {}
index 3997dcd81e153e75a20cb5a7fc2a5447e59ce788..8511d461703de58dfc9cf656656f0ec2b1839e53 100644 (file)
 
 extern crate rustdoc_ffi as lib;
 
-// @has ffi/fn.foreigner.html //pre 'pub unsafe extern fn foreigner(cold_as_ice: u32)'
+// @has ffi/fn.foreigner.html //pre 'pub unsafe extern "C" fn foreigner(cold_as_ice: u32)'
 pub use lib::foreigner;
 
 extern "C" {
-    // @has ffi/fn.another.html //pre 'pub unsafe extern fn another(cold_as_ice: u32)'
+    // @has ffi/fn.another.html //pre 'pub unsafe extern "C" fn another(cold_as_ice: u32)'
     pub fn another(cold_as_ice: u32);
 }
index 6f84428b0798ff36965118296d20a204c61fa4b3..75df53589454f6d3927e8d8fc382d9f3b4f7fc9d 100644 (file)
@@ -10,7 +10,7 @@
 
 extern {
     // @has issue_22038/fn.foo1.html \
-    //      '//*[@class="rust fn"]' 'pub unsafe extern fn foo1()'
+    //      '//*[@class="rust fn"]' 'pub unsafe extern "C" fn foo1()'
     pub fn foo1();
 }
 
@@ -21,7 +21,7 @@
 }
 
 // @has issue_22038/fn.bar.html \
-//      '//*[@class="rust fn"]' 'pub extern fn bar()'
+//      '//*[@class="rust fn"]' 'pub extern "C" fn bar()'
 pub extern fn bar() {}
 
 // @has issue_22038/fn.baz.html \
index 1b60c2a334fa5ea7ae15a4bd553a9cdd9bc791a6..6ba776ba4679f845c2dd9f3f880e586f7d056b34 100644 (file)
@@ -9,6 +9,6 @@
 // except according to those terms.
 
 extern "C" {
-    // @has variadic/fn.foo.html //pre 'pub unsafe extern fn foo(x: i32, ...)'
+    // @has variadic/fn.foo.html //pre 'pub unsafe extern "C" fn foo(x: i32, ...)'
     pub fn foo(x: i32, ...);
 }
index adc229aaacc54d77955283f760e5509e737aaf53..78e0f7e619b12791e7f52b917eb205959da36f9f 100644 (file)
@@ -4,7 +4,9 @@ error: no method named `count` found for type `std::iter::Filter<std::iter::Fuse
 17 |     once::<&str>("str").fuse().filter(|a: &str| true).count();
    |                                                       ^^^^^
    |
-   = note: the method `count` exists but the following trait bounds were not satisfied: `[closure@$DIR/issue-36053-2.rs:17:39: 17:53] : std::ops::FnMut<(&_,)>`, `std::iter::Filter<std::iter::Fuse<std::iter::Once<&str>>, [closure@$DIR/issue-36053-2.rs:17:39: 17:53]> : std::iter::Iterator`
+   = note: the method `count` exists but the following trait bounds were not satisfied:
+           `[closure@$DIR/issue-36053-2.rs:17:39: 17:53] : std::ops::FnMut<(&_,)>`
+           `std::iter::Filter<std::iter::Fuse<std::iter::Once<&str>>, [closure@$DIR/issue-36053-2.rs:17:39: 17:53]> : std::iter::Iterator`
 
 error[E0281]: type mismatch: `[closure@$DIR/issue-36053-2.rs:17:39: 17:53]` implements the trait `for<'r> std::ops::FnMut<(&'r str,)>`, but the trait `for<'r> std::ops::FnMut<(&'r &str,)>` is required
   --> $DIR/issue-36053-2.rs:17:32
diff --git a/src/test/ui/mismatched_types/method-help-unsatisfied-bound.rs b/src/test/ui/mismatched_types/method-help-unsatisfied-bound.rs
new file mode 100644 (file)
index 0000000..a4eb445
--- /dev/null
@@ -0,0 +1,18 @@
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+struct Foo;
+
+fn main() {
+    let a: Result<(), Foo> = Ok(());
+    a.unwrap();
+    //~^ ERROR no method named `unwrap` found for type `std::result::Result<(), Foo>`
+    //~| NOTE the method `unwrap` exists but the following trait bounds were not satisfied
+}
diff --git a/src/test/ui/mismatched_types/method-help-unsatisfied-bound.stderr b/src/test/ui/mismatched_types/method-help-unsatisfied-bound.stderr
new file mode 100644 (file)
index 0000000..2bd786c
--- /dev/null
@@ -0,0 +1,11 @@
+error: no method named `unwrap` found for type `std::result::Result<(), Foo>` in the current scope
+  --> $DIR/method-help-unsatisfied-bound.rs:15:7
+   |
+15 |     a.unwrap();
+   |       ^^^^^^
+   |
+   = note: the method `unwrap` exists but the following trait bounds were not satisfied:
+           `Foo : std::fmt::Debug`
+
+error: aborting due to previous error
+
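The new UI test and its expected output pin down the friendlier "trait bounds were not satisfied" note for `Result::unwrap`. As a hedged aside (not part of the test suite), the bound in question is `E: Debug`, so deriving `Debug` for the error type is what would make the call compile:

```
// `Result<T, E>::unwrap` requires `E: Debug`; with the derive in place,
// the `a.unwrap()` call from the test above is accepted.
#[derive(Debug)]
struct Foo;

fn main() {
    let a: Result<(), Foo> = Ok(());
    a.unwrap();
}
```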
index 13d92c64d0153d95dbabeb49b828bbbef4b1bb34..b007d82be9aa82ae69d382b392d91c151df16da2 160000 (submodule)
@@ -1 +1 @@
-Subproject commit 13d92c64d0153d95dbabeb49b828bbbef4b1bb34
+Subproject commit b007d82be9aa82ae69d382b392d91c151df16da2