git.lizzy.rs Git - rust.git/commitdiff
Rollup merge of #42037 - nagisa:charpat, r=sfackler
author	Mark Simulacrum <mark.simulacrum@gmail.com>
Fri, 19 May 2017 20:16:17 +0000 (14:16 -0600)
committer	GitHub <noreply@github.com>
Fri, 19 May 2017 20:16:17 +0000 (14:16 -0600)
Minor optimisation of the string operations

The interesting benchmarks are:

| Benchmark | Delta (<100% → better than original) | Original ns/iter | This patch ns/iter |
|---|---:|---:|---:|
| str::contains_bang_char::long_lorem_ipsum | 24.98% | 5565 | 1390 |
| str::contains_bang_char::short_ascii | 27.78% | 144 | 40 |
| str::contains_bang_char::short_mixed | 33.33% | 120 | 40 |
| str::contains_bang_char::short_pile_of_poo | 59.42% | 69 | 41 |
| str::ends_with_ascii_char::long_lorem_ipsum | 45.26% | 5457 | 2470 |
| str::ends_with_ascii_char::short_ascii | 45.01% | 5486 | 2469 |
| str::ends_with_ascii_char::short_mixed | 46.31% | 5452 | 2525 |
| str::ends_with_ascii_char::short_pile_of_poo | 45.34% | 5443 | 2468 |
| str::ends_with_unichar::long_lorem_ipsum | 45.94% | 5466 | 2511 |
| str::ends_with_unichar::short_ascii | 46.06% | 5456 | 2513 |
| str::ends_with_unichar::short_mixed | 45.50% | 5486 | 2496 |
| str::ends_with_unichar::short_pile_of_poo | 45.65% | 5448 | 2487 |
| str::find_underscore_char::long_lorem_ipsum | 62.21% | 5567 | 3463 |
| str::find_underscore_char::short_ascii | 66.44% | 146 | 97 |
| str::find_underscore_char::short_mixed | 76.23% | 122 | 93 |
| str::find_underscore_str::short_pile_of_poo | 47.62% | 252 | 120 |
| str::find_zzz_char::long_lorem_ipsum | 62.17% | 5569 | 3462 |
| str::find_zzz_char::short_ascii | 66.67% | 147 | 98 |
| str::find_zzz_char::short_mixed | 75.41% | 122 | 92 |
| str::find_zzz_str::long_lorem_ipsum | 72.86% | 925 | 674 |
| str::rfind_underscore_char::long_lorem_ipsum | 34.19% | 7128 | 2437 |
| str::rfind_underscore_char::short_ascii | 37.31% | 193 | 72 |
| str::rfind_underscore_char::short_mixed | 41.42% | 169 | 70 |
| str::rfind_underscore_char::short_pile_of_poo | 50.51% | 99 | 50 |
| str::rfind_zzz_char::long_lorem_ipsum | 34.20% | 7129 | 2438 |
| str::rfind_zzz_char::short_ascii | 37.11% | 194 | 72 |
| str::rfind_zzz_char::short_mixed | 40.94% | 171 | 70 |
| str::rfind_zzz_char::short_pile_of_poo | 54.81% | 104 | 57 |
| str::trim_right_ascii_char::short_mixed | 110.42% | 48 | 53 |
| string::bench_from | 117.24% | 58 | 68 |
| string::bench_from_str | 117.24% | 58 | 68 |
| string::bench_push_str | 111.11% | 54 | 60 |
| string::bench_to_string | 112.07% | 58 | 65 |
| string::from_utf8_lossy_100_invalid | 111.31% | 1529 | 1702 |
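
For reference, a minimal sketch of the kind of microbenchmark behind names such as str::contains_bang_char::short_ascii: each case drives a `str` search method with a `char` pattern over a fixed haystack. This is an illustrative nightly `#[bench]` harness, not the in-tree benchmark code; the haystacks and function names here are assumptions.

```rust
#![feature(test)]
extern crate test;

use test::{black_box, Bencher};

// Illustrative fixtures only; the in-tree benches define their own haystacks.
const SHORT_ASCII: &str = "Mary had a little lamb, Little lamb";
const SHORT_MIXED: &str = "ศไทย中华Việt Nam; Mary had a little lamb, Little lamb";

#[bench]
fn contains_bang_char_short_ascii(b: &mut Bencher) {
    // `char` patterns such as '!' are the case this patch targets
    // (as opposed to &str patterns like "!").
    b.iter(|| black_box(SHORT_ASCII).contains('!'));
}

#[bench]
fn find_underscore_char_short_mixed(b: &mut Bencher) {
    b.iter(|| black_box(SHORT_MIXED).find('_'));
}

#[bench]
fn ends_with_ascii_char_short_ascii(b: &mut Bencher) {
    b.iter(|| black_box(SHORT_ASCII).ends_with('b'));
}
```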

227 files changed:
.gitmodules
CONTRIBUTING.md
appveyor.yml
configure
src/Cargo.lock
src/Cargo.toml
src/bootstrap/bin/rustc.rs
src/bootstrap/bootstrap.py
src/bootstrap/config.rs
src/bootstrap/config.toml.example
src/bootstrap/dist.rs
src/bootstrap/install.rs
src/bootstrap/lib.rs
src/bootstrap/step.rs
src/ci/docker/README.md
src/ci/docker/android-ndk.sh [deleted file]
src/ci/docker/arm-android/Dockerfile
src/ci/docker/arm-android/install-ndk.sh [deleted file]
src/ci/docker/arm-android/install-sdk.sh [deleted file]
src/ci/docker/arm-android/start-emulator.sh [deleted file]
src/ci/docker/armhf-gnu/Dockerfile
src/ci/docker/cross/Dockerfile
src/ci/docker/disabled/dist-aarch64-android/Dockerfile
src/ci/docker/disabled/dist-armv7-android/Dockerfile
src/ci/docker/disabled/dist-i686-android/Dockerfile
src/ci/docker/disabled/dist-x86_64-android/Dockerfile
src/ci/docker/dist-aarch64-linux/Dockerfile
src/ci/docker/dist-android/Dockerfile
src/ci/docker/dist-android/install-ndk.sh [deleted file]
src/ci/docker/dist-arm-linux/Dockerfile
src/ci/docker/dist-armhf-linux/Dockerfile
src/ci/docker/dist-armv7-linux/Dockerfile
src/ci/docker/dist-fuchsia/Dockerfile
src/ci/docker/dist-i586-gnu-i686-musl/Dockerfile
src/ci/docker/dist-i686-freebsd/Dockerfile
src/ci/docker/dist-i686-linux/Dockerfile
src/ci/docker/dist-powerpc-linux/Dockerfile
src/ci/docker/dist-powerpc64-linux/Dockerfile
src/ci/docker/dist-powerpc64le-linux/Dockerfile
src/ci/docker/dist-s390x-linux/Dockerfile
src/ci/docker/dist-x86_64-freebsd/Dockerfile
src/ci/docker/dist-x86_64-linux/Dockerfile
src/ci/docker/dist-x86_64-musl/Dockerfile
src/ci/docker/dist-x86_64-netbsd/Dockerfile
src/ci/docker/emscripten/Dockerfile
src/ci/docker/run.sh
src/ci/docker/scripts/android-ndk.sh [new file with mode: 0644]
src/ci/docker/scripts/android-sdk.sh [new file with mode: 0644]
src/ci/docker/scripts/android-start-emulator.sh [new file with mode: 0755]
src/ci/docker/scripts/dumb-init.sh [new file with mode: 0644]
src/ci/docker/scripts/sccache.sh [new file with mode: 0644]
src/doc/book
src/doc/reference
src/doc/unstable-book/src/SUMMARY.md
src/doc/unstable-book/src/language-features/advanced-slice-patterns.md
src/doc/unstable-book/src/language-features/asm.md
src/doc/unstable-book/src/language-features/box-patterns.md
src/doc/unstable-book/src/language-features/box-syntax.md
src/doc/unstable-book/src/language-features/global_asm.md
src/doc/unstable-book/src/language-features/loop-break-value.md
src/doc/unstable-book/src/language-features/plugin-registrar.md
src/doc/unstable-book/src/language-features/plugin.md
src/doc/unstable-book/src/language-features/proc-macro.md
src/doc/unstable-book/src/language-features/slice-patterns.md
src/doc/unstable-book/src/library-features/alloc-jemalloc.md
src/doc/unstable-book/src/library-features/alloc-system.md
src/doc/unstable-book/src/library-features/vec-resize-default.md [new file with mode: 0644]
src/libcollections/vec.rs
src/libcore/fmt/float.rs
src/libcore/num/flt2dec/mod.rs
src/libcore/num/wrapping.rs
src/libcore/ops.rs
src/libproc_macro_plugin/quote.rs
src/librustc/Cargo.toml
src/librustc/dep_graph/dep_node.rs
src/librustc/diagnostics.rs
src/librustc/hir/map/mod.rs
src/librustc/ich/fingerprint.rs
src/librustc/ich/hcx.rs
src/librustc/ich/mod.rs
src/librustc/infer/combine.rs
src/librustc/lib.rs
src/librustc/lint/builtin.rs
src/librustc/lint/context.rs
src/librustc/middle/cstore.rs
src/librustc/session/config.rs
src/librustc/session/mod.rs
src/librustc/traits/mod.rs
src/librustc/traits/object_safety.rs
src/librustc/traits/project.rs
src/librustc/traits/specialize/mod.rs
src/librustc/traits/specialize/specialization_graph.rs
src/librustc/ty/maps.rs
src/librustc/ty/mod.rs
src/librustc/ty/trait_def.rs
src/librustc/util/common.rs
src/librustc_driver/Cargo.toml
src/librustc_driver/driver.rs
src/librustc_driver/lib.rs
src/librustc_driver/target_features.rs
src/librustc_driver/test.rs
src/librustc_incremental/calculate_svh/mod.rs
src/librustc_incremental/persist/preds/compress/test_macro.rs
src/librustc_lint/lib.rs
src/librustc_metadata/Cargo.toml
src/librustc_metadata/creader.rs
src/librustc_metadata/cstore.rs
src/librustc_metadata/cstore_impl.rs
src/librustc_metadata/decoder.rs
src/librustc_metadata/encoder.rs
src/librustc_metadata/lib.rs
src/librustc_metadata/locator.rs
src/librustc_metadata/schema.rs
src/librustc_mir/util/pretty.rs
src/librustc_plugin/registry.rs
src/librustc_resolve/lib.rs
src/librustc_resolve/macros.rs
src/librustc_trans/Cargo.toml
src/librustc_trans/back/archive.rs
src/librustc_trans/back/link.rs
src/librustc_trans/base.rs
src/librustc_trans/lib.rs
src/librustc_trans/llvm_util.rs [new file with mode: 0644]
src/librustc_trans/metadata.rs [new file with mode: 0644]
src/librustc_typeck/check/method/suggest.rs
src/librustc_typeck/check/mod.rs
src/librustc_typeck/coherence/builtin.rs
src/librustc_typeck/coherence/mod.rs
src/librustc_typeck/coherence/overlap.rs
src/librustc_typeck/collect.rs
src/librustc_typeck/diagnostics.rs
src/librustdoc/core.rs
src/librustdoc/html/format.rs
src/librustdoc/html/static/rustdoc.css
src/librustdoc/test.rs
src/libstd/process.rs
src/libstd/sys/windows/ext/ffi.rs
src/libstd/sys/windows/ext/fs.rs
src/libstd/sys/windows/ext/mod.rs
src/libstd/sys_common/wtf8.rs
src/libstd/thread/local.rs
src/libstd/thread/mod.rs
src/libsyntax/ast.rs
src/libsyntax/attr.rs
src/libsyntax/codemap.rs
src/libsyntax/config.rs
src/libsyntax/diagnostics/plugin.rs
src/libsyntax/ext/base.rs
src/libsyntax/ext/expand.rs
src/libsyntax/ext/quote.rs
src/libsyntax/ext/source_util.rs
src/libsyntax/ext/tt/macro_parser.rs
src/libsyntax/ext/tt/macro_rules.rs
src/libsyntax/ext/tt/quoted.rs
src/libsyntax/ext/tt/transcribe.rs
src/libsyntax/feature_gate.rs
src/libsyntax/json.rs
src/libsyntax/parse/attr.rs
src/libsyntax/parse/classify.rs
src/libsyntax/parse/common.rs
src/libsyntax/parse/lexer/comments.rs
src/libsyntax/parse/lexer/mod.rs
src/libsyntax/parse/mod.rs
src/libsyntax/parse/obsolete.rs
src/libsyntax/parse/parser.rs
src/libsyntax/parse/token.rs
src/libsyntax/print/pp.rs
src/libsyntax/print/pprust.rs
src/libsyntax/std_inject.rs
src/libsyntax/test.rs
src/libsyntax/tokenstream.rs
src/libsyntax/util/lev_distance.rs
src/libsyntax/util/move_map.rs
src/libsyntax/visit.rs
src/libtest/lib.rs
src/rust-installer [deleted submodule]
src/test/compile-fail-fulldeps/proc-macro/resolve-error.rs
src/test/compile-fail/coherence-inherited-assoc-ty-cycle-err.rs [new file with mode: 0644]
src/test/compile-fail/feature-gate-allow-internal-unstable-nested-macro.rs
src/test/compile-fail/feature-gate-allow-internal-unstable.rs
src/test/compile-fail/invalid-macro-matcher.rs
src/test/compile-fail/issue-21356.rs
src/test/compile-fail/issue-39388.rs
src/test/compile-fail/issue-39404.rs
src/test/compile-fail/issue-41776.rs [new file with mode: 0644]
src/test/compile-fail/issue-5067.rs
src/test/compile-fail/macro-expansion-tests.rs
src/test/compile-fail/macro-follow.rs
src/test/compile-fail/macro-followed-by-seq-bad.rs
src/test/compile-fail/macro-input-future-proofing.rs
src/test/compile-fail/macro-shadowing.rs
src/test/compile-fail/method-help-unsatisfied-bound.rs [deleted file]
src/test/compile-fail/unused-macro-with-bad-frag-spec.rs
src/test/compile-fail/unused-macro-with-follow-violation.rs
src/test/compile-fail/unused-macro.rs [new file with mode: 0644]
src/test/compile-fail/user-defined-macro-rules.rs
src/test/incremental/add_private_fn_at_krate_root_cc/auxiliary/point.rs
src/test/incremental/callee_caller_cross_crate/auxiliary/a.rs
src/test/incremental/change_private_fn_cc/auxiliary/point.rs
src/test/incremental/change_private_impl_method_cc/auxiliary/point.rs
src/test/incremental/remapped_paths_cc/auxiliary/extern_crate.rs
src/test/incremental/remapped_paths_cc/main.rs
src/test/incremental/remove-private-item-cross-crate/auxiliary/a.rs
src/test/incremental/rlib_cross_crate/auxiliary/a.rs
src/test/incremental/struct_change_field_type_cross_crate/auxiliary/a.rs
src/test/incremental/type_alias_cross_crate/auxiliary/a.rs
src/test/mir-opt/issue-41697.rs [new file with mode: 0644]
src/test/run-make/issue-19371/foo.rs
src/test/run-make/llvm-pass/plugin.rs
src/test/run-pass-fulldeps/issue-35829.rs [new file with mode: 0644]
src/test/run-pass-fulldeps/macro-quote-1.rs
src/test/run-pass/issue-41697.rs [deleted file]
src/test/run-pass/issue-41803.rs [new file with mode: 0644]
src/test/run-pass/issue-41936-variance-coerce-unsized-cycle.rs [new file with mode: 0644]
src/test/run-pass/specialization/assoc-ty-graph-cycle.rs [new file with mode: 0644]
src/test/rustdoc/extern-impl.rs
src/test/rustdoc/ffi.rs
src/test/rustdoc/issue-22038.rs
src/test/rustdoc/variadic.rs
src/test/ui/mismatched_types/issue-36053-2.stderr
src/test/ui/mismatched_types/method-help-unsatisfied-bound.rs [new file with mode: 0644]
src/test/ui/mismatched_types/method-help-unsatisfied-bound.stderr [new file with mode: 0644]
src/tools/cargo
src/tools/linkchecker/main.rs
src/tools/rust-installer [new submodule]
src/tools/tidy/src/deps.rs
src/tools/tidy/src/main.rs

index 7cd896b763f543fe87345089b5432036b6c24952..1ef3c086a1c232566edfb66c4bcd2b3836542530 100644 (file)
--- a/.gitmodules
+++ b/.gitmodules
@@ -13,7 +13,7 @@
        path = src/jemalloc
        url = https://github.com/rust-lang/jemalloc.git
 [submodule "src/rust-installer"]
-       path = src/rust-installer
+       path = src/tools/rust-installer
        url = https://github.com/rust-lang/rust-installer.git
 [submodule "src/liblibc"]
        path = src/liblibc
@@ -23,7 +23,7 @@
        url = https://github.com/rust-lang-nursery/nomicon.git
 [submodule "src/tools/cargo"]
        path = src/tools/cargo
-       url = https://github.com/rust-lang/cargo
+       url = https://github.com/rust-lang/cargo.git
 [submodule "reference"]
        path = src/doc/reference
        url = https://github.com/rust-lang-nursery/reference.git
@@ -32,4 +32,4 @@
        url = https://github.com/rust-lang/book.git
 [submodule "src/tools/rls"]
        path = src/tools/rls
-       url = https://github.com/rust-lang-nursery/rls
+       url = https://github.com/rust-lang-nursery/rls.git
index 0314a5dfd8d02a5b283d317ef83cadad445fd8ce..8f121f8d6ed9b54434c5f1bb4e23a149a42381d6 100644 (file)
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -177,7 +177,7 @@ python x.py test src/test/rustdoc
 python x.py build src/libcore --stage 0
 ```
 
-You can explore the build system throught the various `--help` pages for each
+You can explore the build system through the various `--help` pages for each
 subcommand. For example to learn more about a command you can run:
 
 ```
index 42c6256a95a8cc04fef17140dd39e02b0d499e9f..96de1d90f25e623319f3defd8e2e5e2c0cceb2fe 100644 (file)
--- a/appveyor.yml
+++ b/appveyor.yml
@@ -141,9 +141,9 @@ install:
   - set PATH="C:\Program Files (x86)\Inno Setup 5";%PATH%
 
   # Help debug some handle issues on AppVeyor
-  - ps: Invoke-WebRequest -Uri https://download.sysinternals.com/files/Handle.zip -OutFile handle.zip
+  - appveyor-retry appveyor DownloadFile https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-15-Handle.zip
   - mkdir handle
-  - ps: Expand-Archive handle.zip -dest handle
+  - 7z x -ohandle 2017-05-15-Handle.zip
   - set PATH=%PATH%;%CD%\handle
   - handle.exe -accepteula -help
 
index db41f0dfb94f388be9b8c405fa34def58bf29b62..af59d5b0bb88977d2c512af7fa1f1c096ed398ea 100755 (executable)
--- a/configure
+++ b/configure
@@ -519,6 +519,7 @@ valopt_nosave host "${CFG_BUILD}" "GNUs ./configure syntax LLVM host triples"
 valopt_nosave target "${CFG_HOST}" "GNUs ./configure syntax LLVM target triples"
 valopt_nosave mandir "${CFG_PREFIX}/share/man" "install man pages in PATH"
 valopt_nosave docdir "${CFG_PREFIX}/share/doc/rust" "install documentation in PATH"
+valopt_nosave bindir "${CFG_PREFIX}/bin" "install binaries"
 
 # On Windows this determines root of the subtree for target libraries.
 # Host runtime libs always go to 'bin'.
@@ -710,6 +711,7 @@ envopt LDFLAGS
 CFG_PREFIX=${CFG_PREFIX%/}
 CFG_MANDIR=${CFG_MANDIR%/}
 CFG_DOCDIR=${CFG_DOCDIR%/}
+CFG_BINDIR=${CFG_BINDIR%/}
 CFG_HOST="$(echo $CFG_HOST | tr ',' ' ')"
 CFG_TARGET="$(echo $CFG_TARGET | tr ',' ' ')"
 
@@ -750,6 +752,7 @@ putvar CFG_X86_64_LINUX_ANDROID_NDK
 putvar CFG_NACL_CROSS_PATH
 putvar CFG_MANDIR
 putvar CFG_DOCDIR
+putvar CFG_BINDIR
 putvar CFG_USING_LIBCPP
 
 msg
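
Assuming `valopt_nosave bindir` wires up a `--bindir` option in the same way as the existing `--mandir`/`--docdir` options above (an assumption, not stated in the patch), the new knob would be exercised roughly like this hypothetical invocation:

```
./configure --prefix=/opt/rust --bindir=/opt/rust/bin
```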
index 776a268aa8de19fffaa826970bc303d8fe45b073..804fd5807735f619c6cc62a7e01fd4665f3e2602 100644 (file)
--- a/src/Cargo.lock
+++ b/src/Cargo.lock
@@ -6,6 +6,23 @@ dependencies = [
  "libc 0.0.0",
 ]
 
+[[package]]
+name = "advapi32-sys"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "aho-corasick"
+version = "0.5.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "memchr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
 [[package]]
 name = "aho-corasick"
 version = "0.6.3"
@@ -49,20 +66,38 @@ name = "arena"
 version = "0.0.0"
 
 [[package]]
-name = "atty"
-version = "0.2.2"
+name = "backtrace"
+version = "0.3.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
+ "backtrace-sys 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cfg-if 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "dbghelp-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-demangle 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
  "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
+[[package]]
+name = "backtrace-sys"
+version = "0.1.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
 [[package]]
 name = "bitflags"
 version = "0.5.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
+[[package]]
+name = "bitflags"
+version = "0.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
 [[package]]
 name = "bitflags"
 version = "0.8.2"
@@ -77,17 +112,22 @@ dependencies = [
  "filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
  "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)",
  "getopts 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
  "num_cpus 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-serialize 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
  "toml 0.1.30 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
+[[package]]
+name = "bufstream"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
 [[package]]
 name = "build-manifest"
 version = "0.1.0"
 dependencies = [
- "rustc-serialize 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
  "toml 0.1.30 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
@@ -98,23 +138,110 @@ dependencies = [
  "filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
+[[package]]
+name = "cargo"
+version = "0.20.0"
+source = "git+https://github.com/rust-lang/cargo#2b32084293d8da63b48de56363a0f2e986ec3367"
+replace = "cargo 0.20.0"
+
+[[package]]
+name = "cargo"
+version = "0.20.0"
+dependencies = [
+ "advapi32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "bufstream 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cargotest 0.1.0",
+ "chrono 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "crates-io 0.9.0",
+ "crossbeam 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "curl 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "docopt 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "env_logger 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "flate2 0.2.19 (registry+https://github.com/rust-lang/crates.io-index)",
+ "fs2 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "git2 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "git2-curl 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
+ "hamcrest 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libgit2-sys 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "miow 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "num_cpus 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "openssl 0.9.12 (registry+https://github.com/rust-lang/crates.io-index)",
+ "psapi-sys 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
+ "semver 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_derive 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_ignored 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_json 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "shell-escape 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tar 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "term 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "toml 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "url 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "cargotest"
+version = "0.1.0"
+dependencies = [
+ "bufstream 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cargo 0.20.0",
+ "filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "flate2 0.2.19 (registry+https://github.com/rust-lang/crates.io-index)",
+ "git2 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "hamcrest 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_json 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tar 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "term 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "url 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
 [[package]]
 name = "cargotest2"
 version = "0.1.0"
 
+[[package]]
+name = "cfg-if"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "chrono"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "num 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)",
+ "time 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
 [[package]]
 name = "clap"
-version = "2.22.1"
+version = "2.19.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "ansi_term 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "atty 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "bitflags 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "strsim 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "strsim 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "term_size 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "unicode-segmentation 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "unicode-segmentation 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
  "unicode-width 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
- "vec_map 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "vec_map 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "yaml-rust 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
@@ -151,23 +278,115 @@ dependencies = [
  "env_logger 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
  "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-serialize 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "core"
 version = "0.0.0"
 
+[[package]]
+name = "crates-io"
+version = "0.9.0"
+dependencies = [
+ "curl 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_derive 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_json 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "url 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "crossbeam"
+version = "0.2.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "curl"
+version = "0.4.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "curl-sys 0.3.11 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "openssl-probe 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "openssl-sys 0.9.12 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "curl-sys"
+version = "0.3.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libz-sys 1.0.13 (registry+https://github.com/rust-lang/crates.io-index)",
+ "openssl-sys 0.9.12 (registry+https://github.com/rust-lang/crates.io-index)",
+ "pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "dbghelp-sys"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "derive-new"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "quote 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 0.8.7 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
 [[package]]
 name = "diff"
 version = "0.1.10"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
+[[package]]
+name = "docopt"
+version = "0.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "lazy_static 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "regex 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
+ "strsim 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
 [[package]]
 name = "dtoa"
 version = "0.4.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
+[[package]]
+name = "either"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "enum_primitive"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "num-traits 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "env_logger"
+version = "0.3.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "regex 0.1.80 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
 [[package]]
 name = "env_logger"
 version = "0.4.2"
@@ -177,6 +396,14 @@ dependencies = [
  "regex 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
+[[package]]
+name = "error-chain"
+version = "0.10.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "backtrace 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
 [[package]]
 name = "error_index_generator"
 version = "0.0.0"
@@ -186,7 +413,7 @@ name = "filetime"
 version = "0.1.10"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
@@ -197,15 +424,48 @@ dependencies = [
  "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
+[[package]]
+name = "flate2"
+version = "0.2.19"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "miniz-sys 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
 [[package]]
 name = "fmt_macros"
 version = "0.0.0"
 
+[[package]]
+name = "foreign-types"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "fs2"
+version = "0.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
 [[package]]
 name = "gcc"
 version = "0.3.46"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
+[[package]]
+name = "gdi32-sys"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
 [[package]]
 name = "getopts"
 version = "0.0.0"
@@ -215,22 +475,93 @@ name = "getopts"
 version = "0.2.14"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
+[[package]]
+name = "git2"
+version = "0.6.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libgit2-sys 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "openssl-probe 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "openssl-sys 0.9.12 (registry+https://github.com/rust-lang/crates.io-index)",
+ "url 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "git2-curl"
+version = "0.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "curl 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "git2 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "url 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "glob"
+version = "0.2.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
 [[package]]
 name = "graphviz"
 version = "0.0.0"
 
+[[package]]
+name = "hamcrest"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "num 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)",
+ "regex 0.1.80 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
 [[package]]
 name = "handlebars"
-version = "0.25.2"
+version = "0.25.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "lazy_static 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "lazy_static 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
  "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
  "pest 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "quick-error 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quick-error 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "regex 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-serialize 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde_json 0.9.9 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_json 0.9.10 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "idna"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "matches 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
+ "unicode-bidi 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "unicode-normalization 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "installer"
+version = "0.0.0"
+dependencies = [
+ "clap 2.19.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "error-chain 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "flate2 0.2.19 (registry+https://github.com/rust-lang/crates.io-index)",
+ "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "lazy_static 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tar 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)",
+ "walkdir 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "xz2 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "itertools"
+version = "0.5.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "either 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
@@ -247,9 +578,22 @@ dependencies = [
  "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
+[[package]]
+name = "languageserver-types"
+version = "0.10.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "enum_primitive 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_derive 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_json 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "url 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "url_serde 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
 [[package]]
 name = "lazy_static"
-version = "0.2.5"
+version = "0.2.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
@@ -261,9 +605,46 @@ dependencies = [
 
 [[package]]
 name = "libc"
-version = "0.2.21"
+version = "0.2.22"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
+[[package]]
+name = "libgit2-sys"
+version = "0.6.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "cmake 0.1.23 (registry+https://github.com/rust-lang/crates.io-index)",
+ "curl-sys 0.3.11 (registry+https://github.com/rust-lang/crates.io-index)",
+ "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libssh2-sys 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libz-sys 1.0.13 (registry+https://github.com/rust-lang/crates.io-index)",
+ "openssl-sys 0.9.12 (registry+https://github.com/rust-lang/crates.io-index)",
+ "pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "libssh2-sys"
+version = "0.2.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "cmake 0.1.23 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libz-sys 1.0.13 (registry+https://github.com/rust-lang/crates.io-index)",
+ "openssl-sys 0.9.12 (registry+https://github.com/rust-lang/crates.io-index)",
+ "pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "libz-sys"
+version = "1.0.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
 [[package]]
 name = "linkchecker"
 version = "0.1.0"
@@ -273,21 +654,44 @@ name = "log"
 version = "0.3.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
+[[package]]
+name = "lzma-sys"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "matches"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
 [[package]]
 name = "mdbook"
 version = "0.0.21"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "clap 2.22.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "clap 2.19.3 (registry+https://github.com/rust-lang/crates.io-index)",
  "env_logger 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "handlebars 0.25.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "handlebars 0.25.3 (registry+https://github.com/rust-lang/crates.io-index)",
  "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
  "open 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "pulldown-cmark 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)",
  "regex 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde 0.9.11 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde_json 0.9.9 (registry+https://github.com/rust-lang/crates.io-index)",
- "toml 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 0.9.15 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_json 0.9.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "toml 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "memchr"
+version = "0.1.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
@@ -295,7 +699,105 @@ name = "memchr"
 version = "1.0.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "miniz-sys"
+version = "0.1.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "miow"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "net2 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "multimap"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "net2"
+version = "0.2.29"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "cfg-if 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "num"
+version = "0.1.37"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "num-bigint 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)",
+ "num-complex 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)",
+ "num-integer 0.1.34 (registry+https://github.com/rust-lang/crates.io-index)",
+ "num-iter 0.1.33 (registry+https://github.com/rust-lang/crates.io-index)",
+ "num-rational 0.1.36 (registry+https://github.com/rust-lang/crates.io-index)",
+ "num-traits 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "num-bigint"
+version = "0.1.37"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "num-integer 0.1.34 (registry+https://github.com/rust-lang/crates.io-index)",
+ "num-traits 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rand 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "num-complex"
+version = "0.1.37"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "num-traits 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "num-integer"
+version = "0.1.34"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "num-traits 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "num-iter"
+version = "0.1.33"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "num-integer 0.1.34 (registry+https://github.com/rust-lang/crates.io-index)",
+ "num-traits 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "num-rational"
+version = "0.1.36"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "num-bigint 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)",
+ "num-integer 0.1.34 (registry+https://github.com/rust-lang/crates.io-index)",
+ "num-traits 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
@@ -308,7 +810,7 @@ name = "num_cpus"
 version = "1.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
@@ -316,6 +818,43 @@ name = "open"
 version = "1.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
+[[package]]
+name = "openssl"
+version = "0.9.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "bitflags 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "foreign-types 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "lazy_static 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "openssl-sys 0.9.12 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "openssl-probe"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "openssl-sys"
+version = "0.9.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)",
+ "gdi32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
+ "user32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "owning_ref"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "stable_deref_trait 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
 [[package]]
 name = "panic_abort"
 version = "0.0.0"
@@ -339,6 +878,11 @@ name = "pest"
 version = "0.3.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
+[[package]]
+name = "pkg-config"
+version = "0.3.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
 [[package]]
 name = "proc_macro"
 version = "0.0.0"
@@ -355,6 +899,15 @@ dependencies = [
  "syntax_pos 0.0.0",
 ]
 
+[[package]]
+name = "psapi-sys"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
 [[package]]
 name = "pulldown-cmark"
 version = "0.0.8"
@@ -374,8 +927,32 @@ dependencies = [
 
 [[package]]
 name = "quick-error"
-version = "1.1.0"
+version = "1.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "quote"
+version = "0.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "quote"
+version = "0.3.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "racer"
+version = "2.0.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "clap 2.19.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "env_logger 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syntex_errors 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syntex_syntax 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "toml 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "typed-arena 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
 
 [[package]]
 name = "rand"
@@ -384,6 +961,31 @@ dependencies = [
  "core 0.0.0",
 ]
 
+[[package]]
+name = "rand"
+version = "0.3.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "redox_syscall"
+version = "0.1.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "regex"
+version = "0.1.80"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "aho-corasick 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "memchr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
+ "regex-syntax 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
+ "thread_local 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "utf8-ranges 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
 [[package]]
 name = "regex"
 version = "0.2.1"
@@ -396,6 +998,11 @@ dependencies = [
  "utf8-ranges 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
+[[package]]
+name = "regex-syntax"
+version = "0.3.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
 [[package]]
 name = "regex-syntax"
 version = "0.4.0"
@@ -409,13 +1016,48 @@ version = "0.1.0"
 name = "remote-test-server"
 version = "0.1.0"
 
+[[package]]
+name = "rls"
+version = "0.1.0"
+dependencies = [
+ "cargo 0.20.0 (git+https://github.com/rust-lang/cargo)",
+ "derive-new 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "env_logger 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "languageserver-types 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "racer 2.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rls-analysis 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rls-data 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rls-span 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rls-vfs 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustfmt 0.8.4 (git+https://github.com/rust-lang-nursery/rustfmt)",
+ "serde 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_derive 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_json 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "toml 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "url 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "url_serde 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "rls-analysis"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "derive-new 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rls-data 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rls-span 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
 [[package]]
 name = "rls-data"
 version = "0.3.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "rls-span 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-serialize 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
@@ -423,14 +1065,25 @@ name = "rls-span"
 version = "0.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "rustc-serialize 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_derive 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "rls-vfs"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "racer 2.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rls-span 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "rustbook"
 version = "0.1.0"
 dependencies = [
- "clap 2.22.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "clap 2.19.3 (registry+https://github.com/rust-lang/crates.io-index)",
  "mdbook 0.0.21 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
@@ -442,17 +1095,22 @@ dependencies = [
  "fmt_macros 0.0.0",
  "graphviz 0.0.0",
  "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "owning_ref 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
  "rustc_back 0.0.0",
  "rustc_bitflags 0.0.0",
  "rustc_const_math 0.0.0",
  "rustc_data_structures 0.0.0",
  "rustc_errors 0.0.0",
- "rustc_llvm 0.0.0",
  "serialize 0.0.0",
  "syntax 0.0.0",
  "syntax_pos 0.0.0",
 ]
 
+[[package]]
+name = "rustc-demangle"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
 [[package]]
 name = "rustc-main"
 version = "0.0.0"
@@ -464,7 +1122,7 @@ dependencies = [
 
 [[package]]
 name = "rustc-serialize"
-version = "0.3.23"
+version = "0.3.24"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
@@ -552,7 +1210,6 @@ dependencies = [
  "rustc_errors 0.0.0",
  "rustc_incremental 0.0.0",
  "rustc_lint 0.0.0",
- "rustc_llvm 0.0.0",
  "rustc_metadata 0.0.0",
  "rustc_mir 0.0.0",
  "rustc_passes 0.0.0",
@@ -626,13 +1283,13 @@ version = "0.0.0"
 dependencies = [
  "flate 0.0.0",
  "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "owning_ref 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
  "proc_macro 0.0.0",
  "rustc 0.0.0",
  "rustc_back 0.0.0",
  "rustc_const_math 0.0.0",
  "rustc_data_structures 0.0.0",
  "rustc_errors 0.0.0",
- "rustc_llvm 0.0.0",
  "serialize 0.0.0",
  "syntax 0.0.0",
  "syntax_ext 0.0.0",
@@ -722,7 +1379,7 @@ dependencies = [
  "rls-data 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "rls-span 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "rustc 0.0.0",
- "rustc-serialize 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
  "rustc_typeck 0.0.0",
  "syntax 0.0.0",
  "syntax_pos 0.0.0",
@@ -734,6 +1391,7 @@ version = "0.0.0"
 dependencies = [
  "flate 0.0.0",
  "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "owning_ref 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
  "rustc 0.0.0",
  "rustc_back 0.0.0",
  "rustc_bitflags 0.0.0",
@@ -800,26 +1458,128 @@ dependencies = [
  "syntax_pos 0.0.0",
 ]
 
+[[package]]
+name = "rustfmt"
+version = "0.8.4"
+source = "git+https://github.com/rust-lang-nursery/rustfmt#bf9b3fa1d7cab2f7bd541539d397a92b4954ec96"
+dependencies = [
+ "diff 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "env_logger 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "getopts 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)",
+ "itertools 0.5.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "multimap 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "regex 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_derive 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_json 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "strings 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syntex_errors 0.58.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syntex_syntax 0.58.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "term 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "toml 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "unicode-segmentation 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "walkdir 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "same-file"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "semver"
+version = "0.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "semver-parser"
+version = "0.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "serde"
+version = "0.9.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
 [[package]]
 name = "serde"
-version = "0.9.11"
+version = "1.0.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
+[[package]]
+name = "serde_derive"
+version = "1.0.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_derive_internals 0.15.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 0.11.11 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "serde_derive_internals"
+version = "0.15.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "syn 0.11.11 (registry+https://github.com/rust-lang/crates.io-index)",
+ "synom 0.11.3 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "serde_ignored"
+version = "0.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "serde 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "serde_json"
+version = "0.9.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "dtoa 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "itoa 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "num-traits 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 0.9.15 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
 [[package]]
 name = "serde_json"
-version = "0.9.9"
+version = "1.0.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "dtoa 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "itoa 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "num-traits 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde 0.9.11 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "serialize"
 version = "0.0.0"
 
+[[package]]
+name = "shell-escape"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "stable_deref_trait"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
 [[package]]
 name = "std"
 version = "0.0.0"
@@ -851,11 +1611,51 @@ dependencies = [
  "core 0.0.0",
 ]
 
+[[package]]
+name = "strings"
+version = "0.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "strsim"
+version = "0.5.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
 [[package]]
 name = "strsim"
 version = "0.6.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
+[[package]]
+name = "syn"
+version = "0.8.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "quote 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "unicode-xid 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "syn"
+version = "0.11.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)",
+ "synom 0.11.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "synom"
+version = "0.11.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
 [[package]]
 name = "syntax"
 version = "0.0.0"
@@ -887,17 +1687,113 @@ dependencies = [
  "serialize 0.0.0",
 ]
 
+[[package]]
+name = "syntex_errors"
+version = "0.52.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syntex_pos 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "term 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "unicode-xid 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "syntex_errors"
+version = "0.58.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syntex_pos 0.58.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "term 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "syntex_pos"
+version = "0.52.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "syntex_pos"
+version = "0.58.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "syntex_syntax"
+version = "0.52.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syntex_errors 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syntex_pos 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "term 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "unicode-xid 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "syntex_syntax"
+version = "0.58.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "bitflags 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syntex_errors 0.58.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syntex_pos 0.58.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "tar"
+version = "0.4.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "xattr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "tempdir"
+version = "0.3.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "rand 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
 [[package]]
 name = "term"
 version = "0.0.0"
 
+[[package]]
+name = "term"
+version = "0.4.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
 [[package]]
 name = "term_size"
 version = "0.2.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
  "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
@@ -911,11 +1807,28 @@ dependencies = [
 
 [[package]]
 name = "thread-id"
-version = "3.0.0"
+version = "2.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "thread-id"
+version = "3.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "thread_local"
+version = "0.2.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "thread-id 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
@@ -923,7 +1836,7 @@ name = "thread_local"
 version = "0.3.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "thread-id 3.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "thread-id 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "unreachable 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
@@ -931,25 +1844,75 @@ dependencies = [
 name = "tidy"
 version = "0.1.0"
 
+[[package]]
+name = "time"
+version = "0.1.37"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "redox_syscall 0.1.17 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
 [[package]]
 name = "toml"
 version = "0.1.30"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "rustc-serialize 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "toml"
-version = "0.3.1"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "toml"
+version = "0.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "serde 0.9.15 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "toml"
+version = "0.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "serde 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "typed-arena"
+version = "1.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "unicode-bidi"
+version = "0.2.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "serde 0.9.11 (registry+https://github.com/rust-lang/crates.io-index)",
+ "matches 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
+[[package]]
+name = "unicode-normalization"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
 [[package]]
 name = "unicode-segmentation"
-version = "1.1.0"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "unicode-segmentation"
+version = "1.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
@@ -957,6 +1920,16 @@ name = "unicode-width"
 version = "0.1.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
+[[package]]
+name = "unicode-xid"
+version = "0.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "unicode-xid"
+version = "0.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
 [[package]]
 name = "unreachable"
 version = "0.1.1"
@@ -965,6 +1938,38 @@ dependencies = [
  "void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
+[[package]]
+name = "url"
+version = "1.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "idna 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "matches 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "url_serde"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "serde 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "url 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "user32-sys"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "utf8-ranges"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
 [[package]]
 name = "utf8-ranges"
 version = "1.0.0"
@@ -972,7 +1977,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
 name = "vec_map"
-version = "0.7.0"
+version = "0.6.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
@@ -980,6 +1985,16 @@ name = "void"
 version = "1.0.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
+[[package]]
+name = "walkdir"
+version = "1.0.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "same-file 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
 [[package]]
 name = "winapi"
 version = "0.2.8"
@@ -990,53 +2005,189 @@ name = "winapi-build"
 version = "0.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
+[[package]]
+name = "ws2_32-sys"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "xattr"
+version = "0.1.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "xz2"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "lzma-sys 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "yaml-rust"
+version = "0.3.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
 [metadata]
+"checksum advapi32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e06588080cb19d0acb6739808aafa5f26bfb2ca015b2b6370028b44cf7cb8a9a"
+"checksum aho-corasick 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ca972c2ea5f742bfce5687b9aef75506a764f61d37f8f649047846a9686ddb66"
 "checksum aho-corasick 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)" = "500909c4f87a9e52355b26626d890833e9e1d53ac566db76c36faa984b889699"
 "checksum ansi_term 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "23ac7c30002a5accbf7e8987d0632fa6de155b7c3d39d0067317a391e00a2ef6"
-"checksum atty 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "d912da0db7fa85514874458ca3651fe2cddace8d0b0505571dbdcd41ab490159"
+"checksum backtrace 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f551bc2ddd53aea015d453ef0b635af89444afa5ed2405dd0b2062ad5d600d80"
+"checksum backtrace-sys 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "d192fd129132fbc97497c1f2ec2c2c5174e376b95f535199ef4fe0a293d33842"
 "checksum bitflags 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4f67931368edf3a9a51d29886d245f1c3db2f1ef0dcc9e35ff70341b78c10d23"
+"checksum bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "aad18937a628ec6abcd26d1489012cc0e18c21798210f491af69ded9b881106d"
 "checksum bitflags 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)" = "1370e9fc2a6ae53aea8b7a5110edbd08836ed87c88736dfabccade1c2b44bff4"
-"checksum clap 2.22.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e17a4a72ffea176f77d6e2db609c6c919ef221f23862c9915e687fb54d833485"
+"checksum bufstream 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "f2f382711e76b9de6c744cc00d0497baba02fb00a787f088c879f01d09468e32"
+"checksum cargo 0.20.0 (git+https://github.com/rust-lang/cargo)" = "<none>"
+"checksum cfg-if 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "de1e760d7b6535af4241fca8bd8adf68e2e7edacc6b29f5d399050c5e48cf88c"
+"checksum chrono 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d9123be86fd2a8f627836c235ecdf331fdd067ecf7ac05aa1a68fbcf2429f056"
+"checksum clap 2.19.3 (registry+https://github.com/rust-lang/crates.io-index)" = "95b78f3fe0fc94c13c731714363260e04b557a637166f33a4570d3189d642374"
 "checksum cmake 0.1.23 (registry+https://github.com/rust-lang/crates.io-index)" = "92278eb79412c8f75cfc89e707a1bb3a6490b68f7f2e78d15c774f30fe701122"
+"checksum crossbeam 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)" = "0c5ea215664ca264da8a9d9c3be80d2eaf30923c259d03e870388eb927508f97"
+"checksum curl 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "c90e1240ef340dd4027ade439e5c7c2064dd9dc652682117bd50d1486a3add7b"
+"checksum curl-sys 0.3.11 (registry+https://github.com/rust-lang/crates.io-index)" = "23e7e544dc5e1ba42c4a4a678bd47985e84b9c3f4d3404c29700622a029db9c3"
+"checksum dbghelp-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "97590ba53bcb8ac28279161ca943a924d1fd4a8fb3fa63302591647c4fc5b850"
+"checksum derive-new 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "41be6ca3b99e0c0483fb2389685448f650459c3ecbe4e18d7705d8010ec4ab8e"
 "checksum diff 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "0a515461b6c8c08419850ced27bc29e86166dcdcde8fbe76f8b1f0589bb49472"
+"checksum docopt 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ab32ea6e284d87987066f21a9e809a73c14720571ef34516f0890b3d355ccfd8"
 "checksum dtoa 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "80c8b71fd71146990a9742fc06dcbbde19161a267e0ad4e572c35162f4578c90"
+"checksum either 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "18785c1ba806c258137c937e44ada9ee7e69a37e3c72077542cd2f069d78562a"
+"checksum enum_primitive 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "be4551092f4d519593039259a9ed8daedf0da12e5109c5280338073eaeb81180"
+"checksum env_logger 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "15abd780e45b3ea4f76b4e9a26ff4843258dd8a3eed2775a0e7368c2e7936c2f"
 "checksum env_logger 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e3856f1697098606fc6cb97a93de88ca3f3bc35bb878c725920e6e82ecf05e83"
+"checksum error-chain 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d9435d864e017c3c6afeac1654189b06cdb491cf2ff73dbf0d73b0f292f42ff8"
 "checksum filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "5363ab8e4139b8568a6237db5248646e5a8a2f89bd5ccb02092182b11fd3e922"
+"checksum flate2 0.2.19 (registry+https://github.com/rust-lang/crates.io-index)" = "36df0166e856739905cd3d7e0b210fe818592211a008862599845e012d8d304c"
+"checksum foreign-types 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3e4056b9bd47f8ac5ba12be771f77a0dae796d1bbaaf5fd0b9c2d38b69b8a29d"
+"checksum fs2 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "34edaee07555859dc13ca387e6ae05686bb4d0364c95d649b6dab959511f4baf"
 "checksum gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)" = "181e3cebba1d663bd92eb90e2da787e10597e027eb00de8d742b260a7850948f"
+"checksum gdi32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0912515a8ff24ba900422ecda800b52f4016a56251922d397c576bf92c690518"
 "checksum getopts 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)" = "d9047cfbd08a437050b363d35ef160452c5fe8ea5187ae0a624708c91581d685"
-"checksum handlebars 0.25.2 (registry+https://github.com/rust-lang/crates.io-index)" = "663e1728d8037fb0d4e13bcd1b1909fb5d913690a9929eb385922df157c2ff8f"
+"checksum git2 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)" = "9de9df4358c17e448a778d90cd0272e1dab5eae30244502333fa2001c4e24357"
+"checksum git2-curl 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "68676bc784bf0bef83278898929bf64a251e87c0340723d0b93fa096c9c5bf8e"
+"checksum glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "8be18de09a56b60ed0edf84bc9df007e30040691af7acd1c41874faac5895bfb"
+"checksum hamcrest 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "bf088f042a467089e9baa4972f57f9247e42a0cc549ba264c7a04fbb8ecb89d4"
+"checksum handlebars 0.25.3 (registry+https://github.com/rust-lang/crates.io-index)" = "15bdf598fc3c2de40c6b340213028301c0d225eea55a2294e6cc148074e557a1"
+"checksum idna 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "6ac85ec3f80c8e4e99d9325521337e14ec7555c458a14e377d189659a427f375"
+"checksum itertools 0.5.10 (registry+https://github.com/rust-lang/crates.io-index)" = "4833d6978da405305126af4ac88569b5d71ff758581ce5a987dbfa3755f694fc"
 "checksum itoa 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "eb2f404fbc66fd9aac13e998248505e7ecb2ad8e44ab6388684c5fb11c6c251c"
 "checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d"
-"checksum lazy_static 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "4732c563b9a21a406565c4747daa7b46742f082911ae4753f390dc9ec7ee1a97"
-"checksum libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)" = "88ee81885f9f04bff991e306fea7c1c60a5f0f9e409e99f6b40e3311a3363135"
+"checksum languageserver-types 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "97c2985bfcbbcb0189cfa25e1c10c1ac7111df2b6214b652c690127aefdf4e5b"
+"checksum lazy_static 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "3b37545ab726dd833ec6420aaba8231c5b320814b9029ad585555d2a03e94fbf"
+"checksum libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)" = "babb8281da88cba992fa1f4ddec7d63ed96280a1a53ec9b919fd37b53d71e502"
+"checksum libgit2-sys 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)" = "dd89dd7196d5fa35b659c3eaf3c1b14b9bd961bfd1a07dfca49adeb8a6aa3763"
+"checksum libssh2-sys 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "0db4ec23611747ef772db1c4d650f8bd762f07b461727ec998f953c614024b75"
+"checksum libz-sys 1.0.13 (registry+https://github.com/rust-lang/crates.io-index)" = "e5ee912a45d686d393d5ac87fac15ba0ba18daae14e8e7543c63ebf7fb7e970c"
 "checksum log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)" = "5141eca02775a762cc6cd564d8d2c50f67c0ea3a372cbf1c51592b3e029e10ad"
+"checksum lzma-sys 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "fedff6a5cbb24494ec6ee4784e9ac5c187161fede04c7767d49bf87544013afa"
+"checksum matches 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "efd7622e3022e1a6eaa602c4cea8912254e5582c9c692e9167714182244801b1"
 "checksum mdbook 0.0.21 (registry+https://github.com/rust-lang/crates.io-index)" = "f1e2e9d848514dcfad4195788d0d42ae5153a477c191d75d5b84fab10f222fbd"
+"checksum memchr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)" = "d8b629fb514376c675b98c1421e80b151d3817ac42d7c667717d282761418d20"
 "checksum memchr 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1dbccc0e46f1ea47b9f17e6d67c5a96bd27030519c519c9c91327e31275a47b4"
+"checksum miniz-sys 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "28eaee17666671fa872e567547e8428e83308ebe5808cdf6a0e28397dbe2c726"
+"checksum miow 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "8c1f2f3b1cf331de6896aabf6e9d55dca90356cc9960cca7eaaf408a355ae919"
+"checksum multimap 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9223f4774d08e06185e44e555b9a7561243d387bac49c78a6205c42d6975fbf2"
+"checksum net2 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)" = "bc01404e7568680f1259aa5729539f221cb1e6d047a0d9053cab4be8a73b5d67"
+"checksum num 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)" = "98b15ba84e910ea7a1973bccd3df7b31ae282bf9d8bd2897779950c9b8303d40"
+"checksum num-bigint 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)" = "ba6d838b16e56da1b6c383d065ff1ec3c7d7797f65a3e8f6ba7092fd87820bac"
+"checksum num-complex 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)" = "148eb324ca772230853418731ffdf13531738b50f89b30692a01fcdcb0a64677"
+"checksum num-integer 0.1.34 (registry+https://github.com/rust-lang/crates.io-index)" = "ef1a4bf6f9174aa5783a9b4cc892cacd11aebad6c69ad027a0b65c6ca5f8aa37"
+"checksum num-iter 0.1.33 (registry+https://github.com/rust-lang/crates.io-index)" = "f7d1891bd7b936f12349b7d1403761c8a0b85a18b148e9da4429d5d102c1a41e"
+"checksum num-rational 0.1.36 (registry+https://github.com/rust-lang/crates.io-index)" = "c2dc5ea04020a8f18318ae485c751f8cfa1c0e69dcf465c29ddaaa64a313cc44"
 "checksum num-traits 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)" = "e1cbfa3781f3fe73dc05321bed52a06d2d491eaa764c52335cf4399f046ece99"
 "checksum num_cpus 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ca313f1862c7ec3e0dfe8ace9fa91b1d9cb5c84ace3d00f5ec4216238e93c167"
 "checksum open 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3478ed1686bd1300c8a981a940abc92b06fac9cbef747f4c668d4e032ff7b842"
+"checksum openssl 0.9.12 (registry+https://github.com/rust-lang/crates.io-index)" = "bb5d1663b73d10c6a3eda53e2e9d0346f822394e7b858d7257718f65f61dfbe2"
+"checksum openssl-probe 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d98df0270d404ccd3c050a41d579c52d1db15375168bb3471e04ec0f5f378daf"
+"checksum openssl-sys 0.9.12 (registry+https://github.com/rust-lang/crates.io-index)" = "3a5886d87d3e2a0d890bf62dc8944f5e3769a405f7e1e9ef6e517e47fd7a0897"
+"checksum owning_ref 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "cdf84f41639e037b484f93433aa3897863b561ed65c6e59c7073d7c561710f37"
 "checksum pest 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "0a6dda33d67c26f0aac90d324ab2eb7239c819fc7b2552fe9faa4fe88441edc8"
+"checksum pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "3a8b4c6b8165cd1a1cd4b9b120978131389f64bdaf456435caa41e630edba903"
+"checksum psapi-sys 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "abcd5d1a07d360e29727f757a9decb3ce8bc6e0efa8969cfaad669a8317a2478"
 "checksum pulldown-cmark 0.0.14 (registry+https://github.com/rust-lang/crates.io-index)" = "d9ab1e588ef8efd702c7ed9d2bd774db5e6f4d878bb5a1a9f371828fbdff6973"
 "checksum pulldown-cmark 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)" = "1058d7bb927ca067656537eec4e02c2b4b70eaaa129664c5b90c111e20326f41"
-"checksum quick-error 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0aad603e8d7fb67da22dbdf1f4b826ce8829e406124109e73cf1b2454b93a71c"
+"checksum quick-error 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3c36987d4978eb1be2e422b1e0423a557923a5c3e7e6f31d5699e9aafaefa469"
+"checksum quote 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "4c5cf478fe1006dbcc72567121d23dbdae5f1632386068c5c86ff4f645628504"
+"checksum quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6e920b65c65f10b2ae65c831a81a073a89edd28c7cce89475bff467ab4167a"
+"checksum racer 2.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "b0d72b3afd67882adfca61d609fafb8d7aa5f9e814f12c32fcc6e171995920e8"
+"checksum rand 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)" = "022e0636ec2519ddae48154b028864bdce4eaf7d35226ab8e65c611be97b189d"
+"checksum redox_syscall 0.1.17 (registry+https://github.com/rust-lang/crates.io-index)" = "29dbdfd4b9df8ab31dec47c6087b7b13cbf4a776f335e4de8efba8288dda075b"
+"checksum regex 0.1.80 (registry+https://github.com/rust-lang/crates.io-index)" = "4fd4ace6a8cf7860714a2c2280d6c1f7e6a413486c13298bbc86fd3da019402f"
 "checksum regex 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4278c17d0f6d62dfef0ab00028feb45bd7d2102843f80763474eeb1be8a10c01"
+"checksum regex-syntax 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "f9ec002c35e86791825ed294b50008eea9ddfc8def4420124fbc6b08db834957"
 "checksum regex-syntax 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2f9191b1f57603095f105d317e375d19b1c9c5c3185ea9633a99a6dcbed04457"
+"checksum rls-analysis 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a62d88c341375c6f3f8b2e18b9b364896e7d3e7aa916907de717d0267e116506"
 "checksum rls-data 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "fc4277ce3c57f456b11fe3145b181a844a25201bab5cbaa1978457e6e2f27d47"
 "checksum rls-span 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5d7c7046dc6a92f2ae02ed302746db4382e75131b9ce20ce967259f6b5867a6a"
-"checksum rustc-serialize 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)" = "684ce48436d6465300c9ea783b6b14c4361d6b8dcbb1375b486a69cc19e2dfb0"
-"checksum serde 0.9.11 (registry+https://github.com/rust-lang/crates.io-index)" = "a702319c807c016e51f672e5c77d6f0b46afddd744b5e437d6b8436b888b458f"
-"checksum serde_json 0.9.9 (registry+https://github.com/rust-lang/crates.io-index)" = "dbc45439552eb8fb86907a2c41c1fd0ef97458efb87ff7f878db466eb581824e"
+"checksum rls-vfs 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "986eada111517bcb5a7a75205b3f2b70c82e7766653cca61a23f5afce79bdb94"
+"checksum rustc-demangle 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "3058a43ada2c2d0b92b3ae38007a2d0fa5e9db971be260e0171408a4ff471c95"
+"checksum rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)" = "dcf128d1287d2ea9d80910b5f1120d0b8eede3fbf1abe91c40d39ea7d51e6fda"
+"checksum rustfmt 0.8.4 (git+https://github.com/rust-lang-nursery/rustfmt)" = "<none>"
+"checksum same-file 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "d931a44fdaa43b8637009e7632a02adc4f2b2e0733c08caa4cf00e8da4a117a7"
+"checksum semver 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7a3186ec9e65071a2095434b1f5bb24838d4e8e130f584c790f6033c79943537"
+"checksum semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3"
+"checksum serde 0.9.15 (registry+https://github.com/rust-lang/crates.io-index)" = "34b623917345a631dc9608d5194cc206b3fe6c3554cd1c75b937e55e285254af"
+"checksum serde 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "38a3db3a5757f68069aba764b793823ea9fb9717c42c016f8903f8add50f508a"
+"checksum serde_derive 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "e46ef71ee001a4279a4513e79a6ebbb59da3a4987bf77a6df2e5534cd6f21d82"
+"checksum serde_derive_internals 0.15.0 (registry+https://github.com/rust-lang/crates.io-index)" = "021c338d22c7e30f957a6ab7e388cb6098499dda9fd4ba1661ee074ca7a180d1"
+"checksum serde_ignored 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "c10e798e4405d7dcec3658989e35ee6706f730a9ed7c1184d5ebd84317e82f46"
+"checksum serde_json 0.9.10 (registry+https://github.com/rust-lang/crates.io-index)" = "ad8bcf487be7d2e15d3d543f04312de991d631cfe1b43ea0ade69e6a8a5b16a1"
+"checksum serde_json 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "48b04779552e92037212c3615370f6bd57a40ebba7f20e554ff9f55e41a69a7b"
+"checksum shell-escape 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "dd5cc96481d54583947bfe88bf30c23d53f883c6cd0145368b69989d97b84ef8"
+"checksum stable_deref_trait 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "15132e0e364248108c5e2c02e3ab539be8d6f5d52a01ca9bbf27ed657316f02b"
+"checksum strings 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "54f86446ab480b4f60782188f4f78886465c5793aee248cbb48b7fdc0d022420"
+"checksum strsim 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "67f84c44fbb2f91db7fef94554e6b2ac05909c9c0b0bc23bb98d3a1aebfe7f7c"
 "checksum strsim 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b4d15c810519a91cf877e7e36e63fe068815c678181439f2f29e2562147c3694"
+"checksum syn 0.11.11 (registry+https://github.com/rust-lang/crates.io-index)" = "d3b891b9015c88c576343b9b3e41c2c11a51c219ef067b264bd9c8aa9b441dad"
+"checksum syn 0.8.7 (registry+https://github.com/rust-lang/crates.io-index)" = "6ae6fb0dcc9bd85f89a1a4adc0df2fd90c90c98849d61433983dd7a9df6363f7"
+"checksum synom 0.11.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a393066ed9010ebaed60b9eafa373d4b1baac186dd7e008555b0f702b51945b6"
+"checksum syntex_errors 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9e52bffe6202cfb67587784cf23e0ec5bf26d331eef4922a16d5c42e12aa1e9b"
+"checksum syntex_errors 0.58.1 (registry+https://github.com/rust-lang/crates.io-index)" = "867cc5c2d7140ae7eaad2ae9e8bf39cb18a67ca651b7834f88d46ca98faadb9c"
+"checksum syntex_pos 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)" = "955ef4b16af4c468e4680d1497f873ff288f557d338180649e18f915af5e15ac"
+"checksum syntex_pos 0.58.1 (registry+https://github.com/rust-lang/crates.io-index)" = "13ad4762fe52abc9f4008e85c4fb1b1fe3aa91ccb99ff4826a439c7c598e1047"
+"checksum syntex_syntax 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)" = "76a302e717e348aa372ff577791c3832395650073b8d8432f8b3cb170b34afde"
+"checksum syntex_syntax 0.58.1 (registry+https://github.com/rust-lang/crates.io-index)" = "6e0e4dbae163dd98989464c23dd503161b338790640e11537686f2ef0f25c791"
+"checksum tar 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)" = "ab0ef9ead2fe0aa9e18475a96a207bfd5143f4124779ef7429503a8665416ce8"
+"checksum tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "87974a6f5c1dfb344d733055601650059a3363de2a6104819293baff662132d6"
+"checksum term 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)" = "d168af3930b369cfe245132550579d47dfd873d69470755a19c2c6568dbbd989"
 "checksum term_size 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "07b6c1ac5b3fffd75073276bca1ceed01f67a28537097a2a9539e116e50fb21a"
-"checksum thread-id 3.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4437c97558c70d129e40629a5b385b3fb1ffac301e63941335e4d354081ec14a"
+"checksum thread-id 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a9539db560102d1cef46b8b78ce737ff0bb64e7e18d35b2a5688f7d097d0ff03"
+"checksum thread-id 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8df7875b676fddfadffd96deea3b1124e5ede707d4884248931077518cf1f773"
+"checksum thread_local 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)" = "8576dbbfcaef9641452d5cf0df9b0e7eeab7694956dd33bb61515fb8f18cfdd5"
 "checksum thread_local 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "c85048c6260d17cf486ceae3282d9fb6b90be220bf5b28c400f5485ffc29f0c7"
+"checksum time 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)" = "ffd7ccbf969a892bf83f1e441126968a07a3941c24ff522a26af9f9f4585d1a3"
 "checksum toml 0.1.30 (registry+https://github.com/rust-lang/crates.io-index)" = "0590d72182e50e879c4da3b11c6488dae18fccb1ae0c7a3eda18e16795844796"
-"checksum toml 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3474f3c6eaf32eedb4f4a66a26214f020f828a6d96c37e38a35e3a379bbcfd11"
-"checksum unicode-segmentation 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "18127285758f0e2c6cf325bb3f3d138a12fee27de4f23e146cd6a179f26c2cf3"
+"checksum toml 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "736b60249cb25337bc196faa43ee12c705e426f3d55c214d73a4e7be06f92cb4"
+"checksum toml 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "bd86ad9ebee246fdedd610e0f6d0587b754a3d81438db930a244d0480ed7878f"
+"checksum toml 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4cc5dbfb20a481e64b99eb7ae280859ec76730c7191570ba5edaa962394edb0a"
+"checksum typed-arena 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8e2f9dc90da4f9d66ffc9ad3ead2c7d57582a26f4a3292d2ce7011bd29965100"
+"checksum unicode-bidi 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "d3a078ebdd62c0e71a709c3d53d2af693fe09fe93fbff8344aebe289b78f9032"
+"checksum unicode-normalization 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "e28fa37426fceeb5cf8f41ee273faa7c82c47dc8fba5853402841e665fcd86ff"
+"checksum unicode-segmentation 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "c3bc443ded17b11305ffffe6b37e2076f328a5a8cb6aa877b1b98f77699e98b5"
+"checksum unicode-segmentation 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a8083c594e02b8ae1654ae26f0ade5158b119bd88ad0e8227a5d8fcd72407946"
 "checksum unicode-width 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "bf3a113775714a22dcb774d8ea3655c53a32debae63a063acc00a91cc586245f"
+"checksum unicode-xid 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "36dff09cafb4ec7c8cf0023eb0b686cb6ce65499116a12201c9e11840ca01beb"
+"checksum unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "8c1f860d7d29cf02cb2f3f359fd35991af3d30bac52c57d265a3c461074cb4dc"
 "checksum unreachable 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1f2ae5ddb18e1c92664717616dd9549dde73f539f01bd7b77c2edb2446bdff91"
+"checksum url 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f5ba8a749fb4479b043733416c244fa9d1d3af3d7c23804944651c8a448cb87e"
+"checksum url_serde 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "74e7d099f1ee52f823d4bdd60c93c3602043c728f5db3b97bdb548467f7bddea"
+"checksum user32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4ef4711d107b21b410a3a974b1204d9accc8b10dad75d8324b5d755de1617d47"
+"checksum utf8-ranges 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a1ca13c08c41c9c3e04224ed9ff80461d97e121589ff27c753a16cb10830ae0f"
 "checksum utf8-ranges 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "662fab6525a98beff2921d7f61a39e7d59e0b425ebc7d0d9e66d316e55124122"
-"checksum vec_map 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f8cdc8b93bd0198ed872357fb2e667f7125646b1762f16d60b2c96350d361897"
+"checksum vec_map 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "cac5efe5cb0fa14ec2f84f83c701c562ee63f6dcc680861b21d65c682adfb05f"
 "checksum void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d"
+"checksum walkdir 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)" = "bb08f9e670fab86099470b97cd2b252d6527f0b3cc1401acdb595ffc9dd288ff"
 "checksum winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a"
 "checksum winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc"
+"checksum ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d59cefebd0c892fa2dd6de581e937301d8552cb44489cdff035c6187cb63fa5e"
+"checksum xattr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)" = "5f04de8a1346489a2f9e9bd8526b73d135ec554227b17568456e86aa35b6f3fc"
+"checksum xz2 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "e9510bdf100731599107c61f77daf46713a69a568f75458999c1f9dbf6ba25b0"
+"checksum yaml-rust 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "e66366e18dc58b46801afbf2ca7661a9f59cc8c5962c29892b6039b4f86fa992"
index 9aca3e134d6555aa6cd220675174267d3596cf11..85a6df3573ae120f377870a59a5162b3188c809f 100644 (file)
@@ -13,10 +13,7 @@ members = [
   "tools/build-manifest",
   "tools/remote-test-client",
   "tools/remote-test-server",
-]
-
-# These projects have their own Cargo.lock
-exclude = [
+  "tools/rust-installer",
   "tools/cargo",
   "tools/rls",
 ]
@@ -37,3 +34,6 @@ debug-assertions = false
 [profile.test]
 debug = false
 debug-assertions = false
+
+[replace]
+"https://github.com/rust-lang/cargo#0.20.0" = { path = "tools/cargo" }
index 906c468241ae9d765fc006a8fb331c905bbd11d6..eb2cef133a34f2401b014f7a9b545bb7535845ab 100644 (file)
 use std::process::{Command, ExitStatus};
 
 fn main() {
-    let args = env::args_os().skip(1).collect::<Vec<_>>();
+    let mut args = env::args_os().skip(1).collect::<Vec<_>>();
+
+    // Append metadata suffix for internal crates. See the corresponding entry
+    // in bootstrap/lib.rs for details.
+    if let Ok(s) = env::var("RUSTC_METADATA_SUFFIX") {
+        for i in 1..args.len() {
+            // Compute the replacement separately to avoid overlapping borrows of `args`
+            let mut new = None;
+            if let Some(current_as_str) = args[i].to_str() {
+                if (&*args[i - 1] == "-C" && current_as_str.starts_with("metadata")) ||
+                   current_as_str.starts_with("-Cmetadata") {
+                    new = Some(format!("{}-{}", current_as_str, s));
+                }
+            }
+            if let Some(new) = new { args[i] = new.into(); }
+        }
+    }
+
     // Detect whether or not we're a build script depending on whether --target
     // is passed (a bit janky...)
     let target = args.windows(2)
index ad3cf31c1b921b50d41e0d256958eb22b841f241..e15304a7e6e67b318ce93837d32769082b774c54 100644 (file)
@@ -14,6 +14,7 @@ import contextlib
 import datetime
 import hashlib
 import os
+import re
 import shutil
 import subprocess
 import sys
@@ -126,13 +127,13 @@ def unpack(tarball, dst, verbose=False, match=None):
             shutil.move(tp, fp)
     shutil.rmtree(os.path.join(dst, fname))
 
-def run(args, verbose=False, exception=False):
+def run(args, verbose=False, exception=False, cwd=None):
     if verbose:
         print("running: " + ' '.join(args))
     sys.stdout.flush()
     # Use Popen here instead of call(); it apparently keeps PowerShell on
     # Windows from locking up while waiting for input.
-    ret = subprocess.Popen(args)
+    ret = subprocess.Popen(args, cwd=cwd)
     code = ret.wait()
     if code != 0:
         err = "failed to run: " + ' '.join(args)
@@ -297,8 +298,10 @@ class RustBuild(object):
 
     def get_toml(self, key):
         for line in self.config_toml.splitlines():
-            if line.startswith(key + ' ='):
-                return self.get_string(line)
+            match = re.match(r'^{}\s*=(.*)$'.format(key), line)
+            if match is not None:
+                value = match.group(1)
+                return self.get_string(value) or value.strip()
         return None
 
     def get_mk(self, key):
@@ -329,6 +332,8 @@ class RustBuild(object):
 
     def get_string(self, line):
         start = line.find('"')
+        if start == -1:
+            return None
         end = start + 1 + line[start + 1:].find('"')
         return line[start + 1:end]
 
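For illustration only, not part of the commit: a minimal standalone sketch of the new get_toml/get_string behaviour above, assuming a config.toml-style input in which values may or may not be double-quoted.

import re

def get_string(line):
    # Return the first double-quoted substring, or None if there is none.
    start = line.find('"')
    if start == -1:
        return None
    end = start + 1 + line[start + 1:].find('"')
    return line[start + 1:end]

def get_toml(config_toml, key):
    # Match `key = value` with arbitrary whitespace around `=`; fall back to
    # the raw stripped value when it is not quoted (e.g. booleans).
    for line in config_toml.splitlines():
        match = re.match(r'^{}\s*=(.*)$'.format(key), line)
        if match is not None:
            value = match.group(1)
            return get_string(value) or value.strip()
    return None

sample = 'build = "x86_64-unknown-linux-gnu"\nsubmodules = false'
print(get_toml(sample, 'build'))       # x86_64-unknown-linux-gnu
print(get_toml(sample, 'submodules'))  # false
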
@@ -386,12 +391,21 @@ class RustBuild(object):
             args.append("--frozen")
         self.run(args, env)
 
-    def run(self, args, env):
-        proc = subprocess.Popen(args, env=env)
+    def run(self, args, env=None, cwd=None):
+        proc = subprocess.Popen(args, env=env, cwd=cwd)
         ret = proc.wait()
         if ret != 0:
             sys.exit(ret)
 
+    def output(self, args, env=None, cwd=None):
+        proc = subprocess.Popen(args, stdout=subprocess.PIPE, env=env, cwd=cwd)
+        (out, err) = proc.communicate()
+        ret = proc.wait()
+        if ret != 0:
+            print(out)
+            sys.exit(ret)
+        return out
+
     def build_triple(self):
         default_encoding = sys.getdefaultencoding()
         config = self.get_toml('build')
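
For illustration only, not part of the commit: a sketch of how the reworked run() and the new output() helper differ — output() captures stdout for the caller to parse (update_submodules relies on this) while still aborting on a non-zero exit status. The checkout path in the usage comment is hypothetical.

import subprocess
import sys

def run(args, env=None, cwd=None):
    # Inherit the parent's stdout/stderr; just propagate a failing exit code.
    ret = subprocess.Popen(args, env=env, cwd=cwd).wait()
    if ret != 0:
        sys.exit(ret)

def output(args, env=None, cwd=None):
    # Capture stdout so the caller can inspect it; echo whatever was captured
    # and bail out if the command failed.
    proc = subprocess.Popen(args, stdout=subprocess.PIPE, env=env, cwd=cwd)
    out, _ = proc.communicate()
    if proc.returncode != 0:
        print(out)
        sys.exit(proc.returncode)
    return out

# e.g. (hypothetical path):
# print(output(["git", "submodule", "status"], cwd="/path/to/rust"))
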
@@ -529,6 +543,54 @@ class RustBuild(object):
 
         return "{}-{}".format(cputype, ostype)
 
+    def update_submodules(self):
+        if (not os.path.exists(os.path.join(self.rust_root, ".git"))) or \
+            self.get_toml('submodules') == "false" or \
+            self.get_mk('CFG_DISABLE_MANAGE_SUBMODULES') == "1":
+            return
+
+        print('Updating submodules')
+        output = self.output(["git", "submodule", "status"], cwd=self.rust_root)
+        submodules = []
+        for line in output.splitlines():
+            # NOTE `git submodule status` output looks like this:
+            #
+            # -5066b7dcab7e700844b0e2ba71b8af9dc627a59b src/liblibc
+            # +b37ef24aa82d2be3a3cc0fe89bf82292f4ca181c src/compiler-rt (remotes/origin/..)
+            #  e058ca661692a8d01f8cf9d35939dfe3105ce968 src/jemalloc (3.6.0-533-ge058ca6)
+            #
+            # The first character can be '-', '+' or ' ' and denotes the
+            # state of the submodule. Right next to it comes the SHA-1 of
+            # the submodule's HEAD, and after that the path to the
+            # submodule.
+            path = line[1:].split(' ')[1]
+            submodules.append([path, line[0]])
+
+        self.run(["git", "submodule", "sync"], cwd=self.rust_root)
+
+        for submod in submodules:
+            path, status = submod
+            if path.endswith(b"llvm") and \
+                (self.get_toml('llvm-config') or self.get_mk('CFG_LLVM_ROOT')):
+                continue
+            if path.endswith(b"jemalloc") and \
+                (self.get_toml('jemalloc') or self.get_mk('CFG_JEMALLOC_ROOT')):
+                continue
+            submod_path = os.path.join(self.rust_root, path)
+
+            if status == ' ':
+                self.run(["git", "reset", "--hard"], cwd=submod_path)
+                self.run(["git", "clean", "-fdx"], cwd=submod_path)
+            elif status == '+':
+                self.run(["git", "submodule", "update", path], cwd=self.rust_root)
+                self.run(["git", "reset", "--hard"], cwd=submod_path)
+                self.run(["git", "clean", "-fdx"], cwd=submod_path)
+            elif status == '-':
+                self.run(["git", "submodule", "init", path], cwd=self.rust_root)
+                self.run(["git", "submodule", "update", path], cwd=self.rust_root)
+            else:
+                raise ValueError('unknown submodule status: ' + status)
+
 def bootstrap():
     parser = argparse.ArgumentParser(description='Build rust')
     parser.add_argument('--config')
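
For illustration only, not part of the commit: a minimal sketch of how one `git submodule status` line, in the format described by the NOTE comment in update_submodules above, splits into a state character, a SHA-1 and a path.

def parse_status_line(line):
    # First character: '-' (not initialised), '+' (checked out at a commit
    # other than the recorded one) or ' ' (in sync); then the SHA-1; then the
    # path, optionally followed by a "(describe)" suffix that is ignored here.
    state = line[0]
    sha, path = line[1:].split(' ')[0:2]
    return state, sha, path

print(parse_status_line("-5066b7dcab7e700844b0e2ba71b8af9dc627a59b src/liblibc"))
# ('-', '5066b7dcab7e700844b0e2ba71b8af9dc627a59b', 'src/liblibc')
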
@@ -597,6 +659,8 @@ def bootstrap():
     else:
         rb._download_url = 'https://static.rust-lang.org'
 
+    rb.update_submodules()
+
     # Fetch/build the bootstrap
     rb.build = rb.build_triple()
     rb.download_stage0()
index 9c536111811aab1d41a261c3130e4dc862ddad9c..0fb597564e33de15c2bc6defc8fd35273dfb43df 100644 (file)
@@ -99,7 +99,9 @@ pub struct Config {
     // Fallback musl-root for all targets
     pub musl_root: Option<PathBuf>,
     pub prefix: Option<PathBuf>,
+    pub sysconfdir: Option<PathBuf>,
     pub docdir: Option<PathBuf>,
+    pub bindir: Option<PathBuf>,
     pub libdir: Option<PathBuf>,
     pub libdir_relative: Option<PathBuf>,
     pub mandir: Option<PathBuf>,
@@ -165,9 +167,11 @@ struct Build {
 #[derive(RustcDecodable, Default, Clone)]
 struct Install {
     prefix: Option<String>,
-    mandir: Option<String>,
+    sysconfdir: Option<String>,
     docdir: Option<String>,
+    bindir: Option<String>,
     libdir: Option<String>,
+    mandir: Option<String>,
 }
 
 /// TOML representation of how the LLVM build is configured.
@@ -315,9 +319,11 @@ pub fn parse(build: &str, file: Option<PathBuf>) -> Config {
 
         if let Some(ref install) = toml.install {
             config.prefix = install.prefix.clone().map(PathBuf::from);
-            config.mandir = install.mandir.clone().map(PathBuf::from);
+            config.sysconfdir = install.sysconfdir.clone().map(PathBuf::from);
             config.docdir = install.docdir.clone().map(PathBuf::from);
+            config.bindir = install.bindir.clone().map(PathBuf::from);
             config.libdir = install.libdir.clone().map(PathBuf::from);
+            config.mandir = install.mandir.clone().map(PathBuf::from);
         }
 
         if let Some(ref llvm) = toml.llvm {
@@ -523,9 +529,15 @@ macro_rules! check {
                 "CFG_PREFIX" => {
                     self.prefix = Some(PathBuf::from(value));
                 }
+                "CFG_SYSCONFDIR" => {
+                    self.sysconfdir = Some(PathBuf::from(value));
+                }
                 "CFG_DOCDIR" => {
                     self.docdir = Some(PathBuf::from(value));
                 }
+                "CFG_BINDIR" => {
+                    self.bindir = Some(PathBuf::from(value));
+                }
                 "CFG_LIBDIR" => {
                     self.libdir = Some(PathBuf::from(value));
                 }
index 25da976a555e900653a45eee70d4751c7353edc8..df180be4e27ab0836b6df75d4b939aeac3c4fe7d 100644 (file)
 # Instead of installing to /usr/local, install to this path instead.
 #prefix = "/usr/local"
 
+# Where to install system configuration files
+# If this is a relative path, it will get installed in `prefix` above
+#sysconfdir = "/etc"
+
+# Where to install documentation in `prefix` above
+#docdir = "share/doc/rust"
+
+# Where to install binaries in `prefix` above
+#bindir = "bin"
+
 # Where to install libraries in `prefix` above
 #libdir = "lib"
 
 # Where to install man pages in `prefix` above
 #mandir = "share/man"
 
-# Where to install documentation in `prefix` above
-#docdir = "share/doc/rust"
-
 # =============================================================================
 # Options for compiling Rust code itself
 # =============================================================================
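
Several of the install paths above are documented as landing under `prefix` when given as relative values; that behaviour falls out of `Path::join`, which appends a relative argument but lets an absolute argument (such as the `/etc` default for `sysconfdir`) stand on its own. A minimal standalone sketch, not taken from the build system itself:

    use std::path::PathBuf;

    fn main() {
        let prefix = PathBuf::from("/usr/local");

        // A relative setting such as `bindir = "bin"` ends up under the prefix.
        assert_eq!(prefix.join("bin"), PathBuf::from("/usr/local/bin"));

        // An absolute setting such as `sysconfdir = "/etc"` replaces the base,
        // because joining an absolute path discards `prefix` entirely.
        assert_eq!(prefix.join("/etc"), PathBuf::from("/etc"));
    }
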
index 1b69f7413b569744664e1990a2c70c825a250503..511f2c9e80ec72e732caba01da7cbb468850745b 100644 (file)
 
 use build_helper::output;
 
-#[cfg(not(target_os = "solaris"))]
-const SH_CMD: &'static str = "sh";
-// On Solaris, sh is the historical bourne shell, not a POSIX shell, or bash.
-#[cfg(target_os = "solaris")]
-const SH_CMD: &'static str = "bash";
-
 use {Build, Compiler, Mode};
 use channel;
 use util::{cp_r, libdir, is_dylib, cp_filtered, copy, exe};
@@ -55,6 +49,10 @@ pub fn tmpdir(build: &Build) -> PathBuf {
     build.out.join("tmp/dist")
 }
 
+fn rust_installer(build: &Build) -> Command {
+    build.tool_cmd(&Compiler::new(0, &build.config.build), "rust-installer")
+}
+
 /// Builds the `rust-docs` installer component.
 ///
 /// Slurps up documentation from the `stage`'s `host`.
@@ -74,14 +72,14 @@ pub fn docs(build: &Build, stage: u32, host: &str) {
     let src = build.out.join(host).join("doc");
     cp_r(&src, &dst);
 
-    let mut cmd = Command::new(SH_CMD);
-    cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
+    let mut cmd = rust_installer(build);
+    cmd.arg("generate")
        .arg("--product-name=Rust-Documentation")
        .arg("--rel-manifest-dir=rustlib")
        .arg("--success-message=Rust-documentation-is-installed.")
-       .arg(format!("--image-dir={}", sanitize_sh(&image)))
-       .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
-       .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
+       .arg("--image-dir").arg(&image)
+       .arg("--work-dir").arg(&tmpdir(build))
+       .arg("--output-dir").arg(&distdir(build))
        .arg(format!("--package-name={}-{}", name, host))
        .arg("--component-name=rust-docs")
        .arg("--legacy-manifest-dirs=rustlib,cargo")
@@ -124,14 +122,14 @@ pub fn mingw(build: &Build, host: &str) {
        .arg(host);
     build.run(&mut cmd);
 
-    let mut cmd = Command::new(SH_CMD);
-    cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
+    let mut cmd = rust_installer(build);
+    cmd.arg("generate")
        .arg("--product-name=Rust-MinGW")
        .arg("--rel-manifest-dir=rustlib")
        .arg("--success-message=Rust-MinGW-is-installed.")
-       .arg(format!("--image-dir={}", sanitize_sh(&image)))
-       .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
-       .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
+       .arg("--image-dir").arg(&image)
+       .arg("--work-dir").arg(&tmpdir(build))
+       .arg("--output-dir").arg(&distdir(build))
        .arg(format!("--package-name={}-{}", name, host))
        .arg("--component-name=rust-mingw")
        .arg("--legacy-manifest-dirs=rustlib,cargo");
@@ -190,15 +188,15 @@ pub fn rustc(build: &Build, stage: u32, host: &str) {
     }
 
     // Finally, wrap everything up in a nice tarball!
-    let mut cmd = Command::new(SH_CMD);
-    cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
+    let mut cmd = rust_installer(build);
+    cmd.arg("generate")
        .arg("--product-name=Rust")
        .arg("--rel-manifest-dir=rustlib")
        .arg("--success-message=Rust-is-ready-to-roll.")
-       .arg(format!("--image-dir={}", sanitize_sh(&image)))
-       .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
-       .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
-       .arg(format!("--non-installed-overlay={}", sanitize_sh(&overlay)))
+       .arg("--image-dir").arg(&image)
+       .arg("--work-dir").arg(&tmpdir(build))
+       .arg("--output-dir").arg(&distdir(build))
+       .arg("--non-installed-overlay").arg(&overlay)
        .arg(format!("--package-name={}-{}", name, host))
        .arg("--component-name=rustc")
        .arg("--legacy-manifest-dirs=rustlib,cargo");
@@ -300,14 +298,14 @@ pub fn std(build: &Build, compiler: &Compiler, target: &str) {
     let src = build.sysroot(compiler).join("lib/rustlib");
     cp_r(&src.join(target), &dst);
 
-    let mut cmd = Command::new(SH_CMD);
-    cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
+    let mut cmd = rust_installer(build);
+    cmd.arg("generate")
        .arg("--product-name=Rust")
        .arg("--rel-manifest-dir=rustlib")
        .arg("--success-message=std-is-standing-at-the-ready.")
-       .arg(format!("--image-dir={}", sanitize_sh(&image)))
-       .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
-       .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
+       .arg("--image-dir").arg(&image)
+       .arg("--work-dir").arg(&tmpdir(build))
+       .arg("--output-dir").arg(&distdir(build))
        .arg(format!("--package-name={}-{}", name, target))
        .arg(format!("--component-name=rust-std-{}", target))
        .arg("--legacy-manifest-dirs=rustlib,cargo");
@@ -356,14 +354,14 @@ pub fn analysis(build: &Build, compiler: &Compiler, target: &str) {
     println!("image_src: {:?}, dst: {:?}", image_src, dst);
     cp_r(&image_src, &dst);
 
-    let mut cmd = Command::new(SH_CMD);
-    cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
+    let mut cmd = rust_installer(build);
+    cmd.arg("generate")
        .arg("--product-name=Rust")
        .arg("--rel-manifest-dir=rustlib")
        .arg("--success-message=save-analysis-saved.")
-       .arg(format!("--image-dir={}", sanitize_sh(&image)))
-       .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
-       .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
+       .arg("--image-dir").arg(&image)
+       .arg("--work-dir").arg(&tmpdir(build))
+       .arg("--output-dir").arg(&distdir(build))
        .arg(format!("--package-name={}-{}", name, target))
        .arg(format!("--component-name=rust-analysis-{}", target))
        .arg("--legacy-manifest-dirs=rustlib,cargo");
@@ -471,13 +469,17 @@ pub fn rust_src(build: &Build) {
     write_file(&plain_dst_src.join("version"), build.rust_version().as_bytes());
 
     // Create plain source tarball
-    let tarball = rust_src_location(build);
+    let mut tarball = rust_src_location(build);
+    tarball.set_extension(""); // strip .gz
+    tarball.set_extension(""); // strip .tar
     if let Some(dir) = tarball.parent() {
         t!(fs::create_dir_all(dir));
     }
-    let mut cmd = Command::new("tar");
-    cmd.arg("-czf").arg(sanitize_sh(&tarball))
-       .arg(&plain_name)
+    let mut cmd = rust_installer(build);
+    cmd.arg("tarball")
+       .arg("--input").arg(&plain_name)
+       .arg("--output").arg(&tarball)
+       .arg("--work-dir=.")
        .current_dir(tmpdir(build));
     build.run(&mut cmd);
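
One detail worth calling out from the hunk above: the plain source tarball path ends in `.tar.gz`, and `PathBuf::set_extension` only strips the final extension, so it takes two calls to get back to the bare name handed to the installer. A small standalone illustration (the file name here is made up):

    use std::path::PathBuf;

    fn main() {
        let mut tarball = PathBuf::from("dist/example-src.tar.gz");
        tarball.set_extension(""); // strips ".gz"
        tarball.set_extension(""); // strips ".tar"
        assert_eq!(tarball, PathBuf::from("dist/example-src"));
    }
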
 
@@ -521,14 +523,14 @@ pub fn rust_src(build: &Build) {
     }
 
     // Create source tarball in rust-installer format
-    let mut cmd = Command::new(SH_CMD);
-    cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
+    let mut cmd = rust_installer(build);
+    cmd.arg("generate")
        .arg("--product-name=Rust")
        .arg("--rel-manifest-dir=rustlib")
        .arg("--success-message=Awesome-Source.")
-       .arg(format!("--image-dir={}", sanitize_sh(&image)))
-       .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
-       .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
+       .arg("--image-dir").arg(&image)
+       .arg("--work-dir").arg(&tmpdir(build))
+       .arg("--output-dir").arg(&distdir(build))
        .arg(format!("--package-name={}", name))
        .arg("--component-name=rust-src")
        .arg("--legacy-manifest-dirs=rustlib,cargo");
@@ -594,7 +596,7 @@ pub fn cargo(build: &Build, stage: u32, target: &str) {
 
     // Prepare the image directory
     t!(fs::create_dir_all(image.join("share/zsh/site-functions")));
-    t!(fs::create_dir_all(image.join("etc/bash_completions.d")));
+    t!(fs::create_dir_all(image.join("etc/bash_completion.d")));
     let cargo = build.cargo_out(&compiler, Mode::Tool, target)
                      .join(exe("cargo", target));
     install(&cargo, &image.join("bin"), 0o755);
@@ -604,7 +606,7 @@ pub fn cargo(build: &Build, stage: u32, target: &str) {
     }
     install(&etc.join("_cargo"), &image.join("share/zsh/site-functions"), 0o644);
     copy(&etc.join("cargo.bashcomp.sh"),
-         &image.join("etc/bash_completions.d/cargo"));
+         &image.join("etc/bash_completion.d/cargo"));
     let doc = image.join("share/doc/cargo");
     install(&src.join("README.md"), &doc, 0o644);
     install(&src.join("LICENSE-MIT"), &doc, 0o644);
@@ -622,15 +624,15 @@ pub fn cargo(build: &Build, stage: u32, target: &str) {
     t!(t!(File::create(overlay.join("version"))).write_all(version.as_bytes()));
 
     // Generate the installer tarball
-    let mut cmd = Command::new("sh");
-    cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
+    let mut cmd = rust_installer(build);
+    cmd.arg("generate")
        .arg("--product-name=Rust")
        .arg("--rel-manifest-dir=rustlib")
        .arg("--success-message=Rust-is-ready-to-roll.")
-       .arg(format!("--image-dir={}", sanitize_sh(&image)))
-       .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
-       .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
-       .arg(format!("--non-installed-overlay={}", sanitize_sh(&overlay)))
+       .arg("--image-dir").arg(&image)
+       .arg("--work-dir").arg(&tmpdir(build))
+       .arg("--output-dir").arg(&distdir(build))
+       .arg("--non-installed-overlay").arg(&overlay)
        .arg(format!("--package-name={}-{}", name, target))
        .arg("--component-name=cargo")
        .arg("--legacy-manifest-dirs=rustlib,cargo");
@@ -671,15 +673,15 @@ pub fn rls(build: &Build, stage: u32, target: &str) {
     t!(t!(File::create(overlay.join("version"))).write_all(version.as_bytes()));
 
     // Generate the installer tarball
-    let mut cmd = Command::new("sh");
-    cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
+    let mut cmd = rust_installer(build);
+    cmd.arg("generate")
        .arg("--product-name=Rust")
        .arg("--rel-manifest-dir=rustlib")
        .arg("--success-message=RLS-ready-to-serve.")
-       .arg(format!("--image-dir={}", sanitize_sh(&image)))
-       .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
-       .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
-       .arg(format!("--non-installed-overlay={}", sanitize_sh(&overlay)))
+       .arg("--image-dir").arg(&image)
+       .arg("--work-dir").arg(&tmpdir(build))
+       .arg("--output-dir").arg(&distdir(build))
+       .arg("--non-installed-overlay").arg(&overlay)
        .arg(format!("--package-name={}-{}", name, target))
        .arg("--component-name=rls")
        .arg("--legacy-manifest-dirs=rustlib,cargo");
@@ -730,29 +732,28 @@ pub fn extended(build: &Build, stage: u32, target: &str) {
     // upgrades rustc was upgraded before rust-std. To avoid rustc clobbering
     // the std files during uninstall. To do this ensure that rustc comes
     // before rust-std in the list below.
-    let mut input_tarballs = format!("{},{},{},{},{},{}",
-                                     sanitize_sh(&rustc_installer),
-                                     sanitize_sh(&cargo_installer),
-                                     sanitize_sh(&rls_installer),
-                                     sanitize_sh(&analysis_installer),
-                                     sanitize_sh(&docs_installer),
-                                     sanitize_sh(&std_installer));
+    let mut tarballs = vec![rustc_installer, cargo_installer, rls_installer,
+                            analysis_installer, docs_installer, std_installer];
     if target.contains("pc-windows-gnu") {
-        input_tarballs.push_str(",");
-        input_tarballs.push_str(&sanitize_sh(&mingw_installer));
+        tarballs.push(mingw_installer);
+    }
+    let mut input_tarballs = tarballs[0].as_os_str().to_owned();
+    for tarball in &tarballs[1..] {
+        input_tarballs.push(",");
+        input_tarballs.push(tarball);
     }
 
-    let mut cmd = Command::new(SH_CMD);
-    cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/combine-installers.sh")))
+    let mut cmd = rust_installer(build);
+    cmd.arg("combine")
        .arg("--product-name=Rust")
        .arg("--rel-manifest-dir=rustlib")
        .arg("--success-message=Rust-is-ready-to-roll.")
-       .arg(format!("--work-dir={}", sanitize_sh(&work)))
-       .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
+       .arg("--work-dir").arg(&work)
+       .arg("--output-dir").arg(&distdir(build))
        .arg(format!("--package-name={}-{}", pkgname(build, "rust"), target))
        .arg("--legacy-manifest-dirs=rustlib,cargo")
-       .arg(format!("--input-tarballs={}", input_tarballs))
-       .arg(format!("--non-installed-overlay={}", sanitize_sh(&overlay)));
+       .arg("--input-tarballs").arg(input_tarballs)
+       .arg("--non-installed-overlay").arg(&overlay);
     build.run(&mut cmd);
 
     let mut license = String::new();
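
A common thread in the dist.rs changes above is that paths are now handed to the rust-installer tool as separate `Command` arguments rather than formatted into `sh`-friendly strings, which is why the `sanitize_sh` calls disappear: each `.arg(..)` value reaches the child process verbatim, with no shell re-parsing it. A minimal sketch under that assumption (the program name and paths are placeholders, not the real build wiring):

    use std::path::PathBuf;
    use std::process::Command;

    fn main() {
        let tarballs = vec![PathBuf::from("rustc.tar.gz"),
                            PathBuf::from("rust-std.tar.gz")];

        // Join the tarball list with commas, as the `extended` step does.
        let mut input_tarballs = tarballs[0].clone().into_os_string();
        for tarball in &tarballs[1..] {
            input_tarballs.push(",");
            input_tarballs.push(tarball);
        }

        // Paths containing spaces need no quoting: every arg is passed as-is.
        let mut cmd = Command::new("rust-installer");
        cmd.arg("combine")
           .arg("--work-dir").arg(PathBuf::from("build/tmp dir/dist"))
           .arg("--input-tarballs").arg(&input_tarballs);
        println!("{:?}", cmd);
    }
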
index c805522fbf588d06b4676bb8bd0647fa2c1aea54..386b001971bad43c978fc91984b9537c119f41ae 100644 (file)
 /// Installs everything.
 pub fn install(build: &Build, stage: u32, host: &str) {
     let prefix_default = PathBuf::from("/usr/local");
+    let sysconfdir_default = PathBuf::from("/etc");
     let docdir_default = PathBuf::from("share/doc/rust");
-    let mandir_default = PathBuf::from("share/man");
+    let bindir_default = PathBuf::from("bin");
     let libdir_default = PathBuf::from("lib");
+    let mandir_default = PathBuf::from("share/man");
     let prefix = build.config.prefix.as_ref().unwrap_or(&prefix_default);
+    let sysconfdir = build.config.sysconfdir.as_ref().unwrap_or(&sysconfdir_default);
     let docdir = build.config.docdir.as_ref().unwrap_or(&docdir_default);
+    let bindir = build.config.bindir.as_ref().unwrap_or(&bindir_default);
     let libdir = build.config.libdir.as_ref().unwrap_or(&libdir_default);
     let mandir = build.config.mandir.as_ref().unwrap_or(&mandir_default);
 
+    let sysconfdir = prefix.join(sysconfdir);
     let docdir = prefix.join(docdir);
+    let bindir = prefix.join(bindir);
     let libdir = prefix.join(libdir);
     let mandir = prefix.join(mandir);
 
     let destdir = env::var_os("DESTDIR").map(PathBuf::from);
 
     let prefix = add_destdir(&prefix, &destdir);
+    let sysconfdir = add_destdir(&sysconfdir, &destdir);
     let docdir = add_destdir(&docdir, &destdir);
+    let bindir = add_destdir(&bindir, &destdir);
     let libdir = add_destdir(&libdir, &destdir);
     let mandir = add_destdir(&mandir, &destdir);
 
@@ -47,29 +55,35 @@ pub fn install(build: &Build, stage: u32, host: &str) {
     t!(fs::create_dir_all(&empty_dir));
     if build.config.docs {
         install_sh(&build, "docs", "rust-docs", &build.rust_package_vers(),
-                   stage, host, &prefix, &docdir, &libdir, &mandir, &empty_dir);
+                   stage, host, &prefix, &sysconfdir, &docdir, &bindir, &libdir,
+                   &mandir, &empty_dir);
     }
 
     for target in build.config.target.iter() {
         install_sh(&build, "std", "rust-std", &build.rust_package_vers(),
-                   stage, target, &prefix, &docdir, &libdir, &mandir, &empty_dir);
+                   stage, target, &prefix, &sysconfdir, &docdir, &bindir, &libdir,
+                   &mandir, &empty_dir);
     }
 
     if build.config.extended {
         install_sh(&build, "cargo", "cargo", &build.cargo_package_vers(),
-                   stage, host, &prefix, &docdir, &libdir, &mandir, &empty_dir);
+                   stage, host, &prefix, &sysconfdir, &docdir, &bindir, &libdir,
+                   &mandir, &empty_dir);
         install_sh(&build, "rls", "rls", &build.rls_package_vers(),
-                   stage, host, &prefix, &docdir, &libdir, &mandir, &empty_dir);
+                   stage, host, &prefix, &sysconfdir, &docdir, &bindir, &libdir,
+                   &mandir, &empty_dir);
     }
 
     install_sh(&build, "rustc", "rustc", &build.rust_package_vers(),
-               stage, host, &prefix, &docdir, &libdir, &mandir, &empty_dir);
+               stage, host, &prefix, &sysconfdir, &docdir, &bindir, &libdir,
+               &mandir, &empty_dir);
 
     t!(fs::remove_dir_all(&empty_dir));
 }
 
 fn install_sh(build: &Build, package: &str, name: &str, version: &str, stage: u32, host: &str,
-              prefix: &Path, docdir: &Path, libdir: &Path, mandir: &Path, empty_dir: &Path) {
+              prefix: &Path, sysconfdir: &Path, docdir: &Path, bindir: &Path, libdir: &Path,
+              mandir: &Path, empty_dir: &Path) {
     println!("Install {} stage{} ({})", package, stage, host);
     let package_name = format!("{}-{}-{}", name, version, host);
 
@@ -77,7 +91,9 @@ fn install_sh(build: &Build, package: &str, name: &str, version: &str, stage: u3
     cmd.current_dir(empty_dir)
        .arg(sanitize_sh(&tmpdir(build).join(&package_name).join("install.sh")))
        .arg(format!("--prefix={}", sanitize_sh(prefix)))
+       .arg(format!("--sysconfdir={}", sanitize_sh(sysconfdir)))
        .arg(format!("--docdir={}", sanitize_sh(docdir)))
+       .arg(format!("--bindir={}", sanitize_sh(bindir)))
        .arg(format!("--libdir={}", sanitize_sh(libdir)))
        .arg(format!("--mandir={}", sanitize_sh(mandir)))
        .arg("--disable-ldconfig");
index ea0b521a2ce6934b8dc6fc96b9bba97651bb00cf..ca9de43f542193364b1806fe62f8f78be35ebb07 100644 (file)
@@ -82,7 +82,7 @@
 use std::ffi::OsString;
 use std::fs::{self, File};
 use std::io::Read;
-use std::path::{Component, PathBuf, Path};
+use std::path::{PathBuf, Path};
 use std::process::Command;
 
 use build_helper::{run_silent, run_suppressed, output, mtime};
@@ -285,129 +285,12 @@ pub fn build(&mut self) {
             self.verbose(&format!("auto-detected local-rebuild {}", local_release));
             self.local_rebuild = true;
         }
-        self.verbose("updating submodules");
-        self.update_submodules();
         self.verbose("learning about cargo");
         metadata::build(self);
 
         step::run(self);
     }
 
-    /// Updates all git submodules that we have.
-    ///
-    /// This will detect if any submodules are out of date and run the necessary
-    /// commands to sync them all with upstream.
-    fn update_submodules(&self) {
-        struct Submodule<'a> {
-            path: &'a Path,
-            state: State,
-        }
-
-        enum State {
-            // The submodule may have staged/unstaged changes
-            MaybeDirty,
-            // Or could be initialized but never updated
-            NotInitialized,
-            // The submodule itself has extra commits, but those changes haven't been committed to
-            // the (outer) git repository
-            OutOfSync,
-        }
-
-        if !self.src_is_git || !self.config.submodules {
-            return
-        }
-        let git = || {
-            let mut cmd = Command::new("git");
-            cmd.current_dir(&self.src);
-            return cmd
-        };
-        let git_submodule = || {
-            let mut cmd = Command::new("git");
-            cmd.current_dir(&self.src).arg("submodule");
-            return cmd
-        };
-
-        // FIXME: this takes a seriously long time to execute on Windows and a
-        //        nontrivial amount of time on Unix, we should have a better way
-        //        of detecting whether we need to run all the submodule commands
-        //        below.
-        let out = output(git_submodule().arg("status"));
-        let mut submodules = vec![];
-        for line in out.lines() {
-            // NOTE `git submodule status` output looks like this:
-            //
-            // -5066b7dcab7e700844b0e2ba71b8af9dc627a59b src/liblibc
-            // +b37ef24aa82d2be3a3cc0fe89bf82292f4ca181c src/compiler-rt (remotes/origin/..)
-            //  e058ca661692a8d01f8cf9d35939dfe3105ce968 src/jemalloc (3.6.0-533-ge058ca6)
-            //
-            // The first character can be '-', '+' or ' ' and denotes the `State` of the submodule
-            // Right next to this character is the SHA-1 of the submodule HEAD
-            // And after that comes the path to the submodule
-            let path = Path::new(line[1..].split(' ').skip(1).next().unwrap());
-            let state = if line.starts_with('-') {
-                State::NotInitialized
-            } else if line.starts_with('+') {
-                State::OutOfSync
-            } else if line.starts_with(' ') {
-                State::MaybeDirty
-            } else {
-                panic!("unexpected git submodule state: {:?}", line.chars().next());
-            };
-
-            submodules.push(Submodule { path: path, state: state })
-        }
-
-        self.run(git_submodule().arg("sync"));
-
-        for submodule in submodules {
-            // If using llvm-root then don't touch the llvm submodule.
-            if submodule.path.components().any(|c| c == Component::Normal("llvm".as_ref())) &&
-                self.config.target_config.get(&self.config.build)
-                    .and_then(|c| c.llvm_config.as_ref()).is_some()
-            {
-                continue
-            }
-
-            if submodule.path.components().any(|c| c == Component::Normal("jemalloc".as_ref())) &&
-                !self.config.use_jemalloc
-            {
-                continue
-            }
-
-            // `submodule.path` is the relative path to a submodule (from the repository root)
-            // `submodule_path` is the path to a submodule from the cwd
-
-            // use `submodule.path` when e.g. executing a submodule specific command from the
-            // repository root
-            // use `submodule_path` when e.g. executing a normal git command for the submodule
-            // (set via `current_dir`)
-            let submodule_path = self.src.join(submodule.path);
-
-            match submodule.state {
-                State::MaybeDirty => {
-                    // drop staged changes
-                    self.run(git().current_dir(&submodule_path)
-                                  .args(&["reset", "--hard"]));
-                    // drops unstaged changes
-                    self.run(git().current_dir(&submodule_path)
-                                  .args(&["clean", "-fdx"]));
-                },
-                State::NotInitialized => {
-                    self.run(git_submodule().arg("init").arg(submodule.path));
-                    self.run(git_submodule().arg("update").arg(submodule.path));
-                },
-                State::OutOfSync => {
-                    // drops submodule commits that weren't reported to the (outer) git repository
-                    self.run(git_submodule().arg("update").arg(submodule.path));
-                    self.run(git().current_dir(&submodule_path)
-                                  .args(&["reset", "--hard"]));
-                    self.run(git().current_dir(&submodule_path)
-                                  .args(&["clean", "-fdx"]));
-                },
-            }
-        }
-    }
-
     /// Clear out `dir` if `input` is newer.
     ///
     /// After this executes, it will also ensure that `dir` exists.
@@ -475,12 +358,30 @@ fn cargo(&self,
              .env("RUSTDOC_REAL", self.rustdoc(compiler))
              .env("RUSTC_FLAGS", self.rustc_flags(target).join(" "));
 
-        // Tools don't get debuginfo right now, e.g. cargo and rls don't get
-        // compiled with debuginfo.
         if mode != Mode::Tool {
-             cargo.env("RUSTC_DEBUGINFO", self.config.rust_debuginfo.to_string())
-                  .env("RUSTC_DEBUGINFO_LINES", self.config.rust_debuginfo_lines.to_string())
-                  .env("RUSTC_FORCE_UNSTABLE", "1");
+            // Tools don't get debuginfo right now, e.g. cargo and rls don't
+            // get compiled with debuginfo.
+            cargo.env("RUSTC_DEBUGINFO", self.config.rust_debuginfo.to_string())
+                 .env("RUSTC_DEBUGINFO_LINES", self.config.rust_debuginfo_lines.to_string())
+                 .env("RUSTC_FORCE_UNSTABLE", "1");
+
+            // Currently the compiler depends on crates from crates.io, and
+            // then other crates can depend on the compiler (e.g. proc-macro
+            // crates). Let's say, for example that rustc itself depends on the
+            // bitflags crate. If an external crate then depends on the
+            // bitflags crate as well, we need to make sure they don't
+            // conflict, even if they pick the same version of bitflags. We'll
+            // want to make sure that e.g. a plugin and rustc each get their
+            // own copy of bitflags.
+
+            // Cargo ensures that this works in general through the -C metadata
+            // flag. This flag will frob the symbols in the binary to make sure
+            // they're different, even though the source code is the exact
+            // same. To solve this problem for the compiler we extend Cargo's
+            // already-passed -C metadata flag with our own. Our rustc.rs
+            // wrapper around the actual rustc will detect -C metadata being
+            // passed and frob it with this extra string we're passing in.
+            cargo.env("RUSTC_METADATA_SUFFIX", "rustc");
         }
 
         // Enable usage of unstable features
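
To make the metadata-suffix comment above concrete: the RUSTC_METADATA_SUFFIX value is meant to be folded into the `-C metadata=...` argument Cargo already passes, so two crates compiled from identical sources still end up with distinct symbols. The splicing itself happens in the build's rustc wrapper, which is not part of this diff; the sketch below only illustrates the idea, with hypothetical argument handling:

    use std::env;

    // Hypothetical: append a suffix to every `metadata=...` value found in the
    // forwarded rustc arguments (Cargo passes these as `-C metadata=<hash>`).
    fn append_metadata_suffix(args: Vec<String>, suffix: &str) -> Vec<String> {
        args.into_iter()
            .map(|arg| {
                if suffix.is_empty() || !arg.starts_with("metadata=") {
                    return arg;
                }
                format!("{}-{}", arg, suffix)
            })
            .collect()
    }

    fn main() {
        let suffix = env::var("RUSTC_METADATA_SUFFIX").unwrap_or_default();
        let args = vec!["-C".to_string(), "metadata=abc123".to_string()];
        println!("{:?}", append_metadata_suffix(args, &suffix));
    }
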
index 970c0bc565d864c533c255483c8a0e63a1d12d8c..92666e8e63907caec309922aae24759a968fdd33 100644 (file)
@@ -574,6 +574,10 @@ fn crate_rule<'a, 'b>(build: &'a Build,
          .dep(|s| s.name("maybe-clean-tools"))
          .dep(|s| s.name("libstd-tool"))
          .run(move |s| compile::tool(build, s.stage, s.target, "remote-test-client"));
+    rules.build("tool-rust-installer", "src/tools/rust-installer")
+         .dep(|s| s.name("maybe-clean-tools"))
+         .dep(|s| s.name("libstd-tool"))
+         .run(move |s| compile::tool(build, s.stage, s.target, "rust-installer"));
     rules.build("tool-cargo", "src/tools/cargo")
          .host(true)
          .default(build.config.extended)
@@ -704,6 +708,7 @@ fn crate_rule<'a, 'b>(build: &'a Build,
          .host(true)
          .only_host_build(true)
          .default(true)
+         .dep(move |s| tool_rust_installer(build, s))
          .run(move |s| dist::rustc(build, s.stage, s.target));
     rules.dist("dist-std", "src/libstd")
          .dep(move |s| {
@@ -718,10 +723,12 @@ fn crate_rule<'a, 'b>(build: &'a Build,
          })
          .default(true)
          .only_host_build(true)
+         .dep(move |s| tool_rust_installer(build, s))
          .run(move |s| dist::std(build, &s.compiler(), s.target));
     rules.dist("dist-mingw", "path/to/nowhere")
          .default(true)
          .only_host_build(true)
+         .dep(move |s| tool_rust_installer(build, s))
          .run(move |s| {
              if s.target.contains("pc-windows-gnu") {
                  dist::mingw(build, s.target)
@@ -732,21 +739,25 @@ fn crate_rule<'a, 'b>(build: &'a Build,
          .host(true)
          .only_build(true)
          .only_host_build(true)
+         .dep(move |s| tool_rust_installer(build, s))
          .run(move |_| dist::rust_src(build));
     rules.dist("dist-docs", "src/doc")
          .default(true)
          .only_host_build(true)
          .dep(|s| s.name("default:doc"))
+         .dep(move |s| tool_rust_installer(build, s))
          .run(move |s| dist::docs(build, s.stage, s.target));
     rules.dist("dist-analysis", "analysis")
          .default(build.config.extended)
          .dep(|s| s.name("dist-std"))
          .only_host_build(true)
+         .dep(move |s| tool_rust_installer(build, s))
          .run(move |s| dist::analysis(build, &s.compiler(), s.target));
     rules.dist("dist-rls", "rls")
          .host(true)
          .only_host_build(true)
          .dep(|s| s.name("tool-rls"))
+         .dep(move |s| tool_rust_installer(build, s))
          .run(move |s| dist::rls(build, s.stage, s.target));
     rules.dist("install", "path/to/nowhere")
          .dep(|s| s.name("default:dist"))
@@ -755,6 +766,7 @@ fn crate_rule<'a, 'b>(build: &'a Build,
          .host(true)
          .only_host_build(true)
          .dep(|s| s.name("tool-cargo"))
+         .dep(move |s| tool_rust_installer(build, s))
          .run(move |s| dist::cargo(build, s.stage, s.target));
     rules.dist("dist-extended", "extended")
          .default(build.config.extended)
@@ -767,6 +779,7 @@ fn crate_rule<'a, 'b>(build: &'a Build,
          .dep(|d| d.name("dist-cargo"))
          .dep(|d| d.name("dist-rls"))
          .dep(|d| d.name("dist-analysis"))
+         .dep(move |s| tool_rust_installer(build, s))
          .run(move |s| dist::extended(build, s.stage, s.target));
 
     rules.dist("dist-sign", "hash-and-sign")
@@ -778,6 +791,14 @@ fn crate_rule<'a, 'b>(build: &'a Build,
 
     rules.verify();
     return rules;
+
+    /// Helper to depend on a stage0 build-only rust-installer tool.
+    fn tool_rust_installer<'a>(build: &'a Build, step: &Step<'a>) -> Step<'a> {
+        step.name("tool-rust-installer")
+            .host(&build.config.build)
+            .target(&build.config.build)
+            .stage(0)
+    }
 }
 
 #[derive(PartialEq, Eq, Hash, Clone, Debug)]
index 6f3a7e091e1edb78ecf2feaa4afdbee02bb16a40..627b5062df3334701e4a64c0995bb42ada55c2e0 100644 (file)
@@ -16,6 +16,12 @@ for example:
 
 Images will output artifacts in an `obj` dir at the root of a repository.
 
+## Filesystem layout
+
+- Each directory, excluding `scripts` and `disabled`, corresponds to a docker image
+- `scripts` contains files shared by docker images
+- `disabled` contains images that are not built on Travis
+
 ## Cross toolchains
 
 A number of these images take quite a long time to compile as they're building
diff --git a/src/ci/docker/android-ndk.sh b/src/ci/docker/android-ndk.sh
deleted file mode 100644 (file)
index 4849f84..0000000
+++ /dev/null
@@ -1,35 +0,0 @@
-#!/bin/sh
-# Copyright 2017 The Rust Project Developers. See the COPYRIGHT
-# file at the top-level directory of this distribution and at
-# http://rust-lang.org/COPYRIGHT.
-#
-# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-# option. This file may not be copied, modified, or distributed
-# except according to those terms.
-
-set -ex
-
-URL=https://dl.google.com/android/repository
-
-download_ndk() {
-    mkdir -p /android/ndk
-    cd /android/ndk
-    curl -O $URL/$1
-    unzip -q $1
-    rm $1
-    mv android-ndk-* ndk
-}
-
-make_standalone_toolchain() {
-    # See https://developer.android.com/ndk/guides/standalone_toolchain.htm
-    python2.7 /android/ndk/ndk/build/tools/make_standalone_toolchain.py \
-        --install-dir /android/ndk/$1-$2 \
-        --arch $1 \
-        --api $2
-}
-
-remove_ndk() {
-    rm -rf /android/ndk/ndk
-}
index 93f15baf55e7745fa912ab3d707ed75b9108269c..2a928c5ec7e894b4a1f331e879be3db19e15441d 100644 (file)
@@ -2,52 +2,44 @@ FROM ubuntu:16.04
 
 RUN apt-get update && \
     apt-get install -y --no-install-recommends \
+  ca-certificates \
+  cmake \
+  curl \
+  file \
   g++ \
+  git \
+  libssl-dev \
   make \
-  file \
-  curl \
-  ca-certificates \
+  pkg-config \
   python2.7 \
-  git \
-  cmake \
-  unzip \
   sudo \
-  xz-utils \
-  libssl-dev \
-  pkg-config
-
-RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
-    dpkg -i dumb-init_*.deb && \
-    rm dumb-init_*.deb
+  unzip \
+  xz-utils
 
-RUN curl -o /usr/local/bin/sccache \
-      https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
-    chmod +x /usr/local/bin/sccache
+# dumb-init
+COPY scripts/dumb-init.sh /scripts/
+RUN sh /scripts/dumb-init.sh
 
-# Install NDK
-COPY install-ndk.sh /tmp
-RUN . /tmp/install-ndk.sh && \
-    download_ndk android-ndk-r13b-linux-x86_64.zip && \
-    make_standalone_toolchain arm 9 && \
-    remove_ndk
+# ndk
+COPY scripts/android-ndk.sh /scripts/
+RUN . /scripts/android-ndk.sh && \
+    download_and_make_toolchain android-ndk-r13b-linux-x86_64.zip arm 9
 
-# Install SDK
+# sdk
 RUN dpkg --add-architecture i386 && \
     apt-get update && \
     apt-get install -y --no-install-recommends \
-  openjdk-9-jre-headless \
-  tzdata \
-  libstdc++6:i386 \
   libgl1-mesa-glx \
-  libpulse0
+  libpulse0 \
+  libstdc++6:i386 \
+  openjdk-9-jre-headless \
+  tzdata
 
-COPY install-sdk.sh /tmp
-RUN . /tmp/install-sdk.sh && \
-    download_sdk tools_r25.2.5-linux.zip && \
-    download_sysimage armeabi-v7a 18 && \
-    create_avd armeabi-v7a 18
+COPY scripts/android-sdk.sh /scripts/
+RUN . /scripts/android-sdk.sh && \
+    download_and_create_avd tools_r25.2.5-linux.zip armeabi-v7a 18
 
-# Setup env
+# env
 ENV PATH=$PATH:/android/sdk/tools
 ENV PATH=$PATH:/android/sdk/platform-tools
 
@@ -57,8 +49,12 @@ ENV RUST_CONFIGURE_ARGS \
       --target=$TARGETS \
       --arm-linux-androideabi-ndk=/android/ndk/arm-9
 
-ENV SCRIPT python2.7 ../x.py test --target $TARGETS --verbose
+ENV SCRIPT python2.7 ../x.py test --target $TARGETS
+
+# sccache
+COPY scripts/sccache.sh /scripts/
+RUN sh /scripts/sccache.sh
 
-# Entrypoint
-COPY start-emulator.sh /android/
-ENTRYPOINT ["/usr/bin/dumb-init", "--", "/android/start-emulator.sh"]
+# init
+COPY scripts/android-start-emulator.sh /scripts/
+ENTRYPOINT ["/usr/bin/dumb-init", "--", "/scripts/android-start-emulator.sh"]
diff --git a/src/ci/docker/arm-android/install-ndk.sh b/src/ci/docker/arm-android/install-ndk.sh
deleted file mode 100644 (file)
index 8081872..0000000
+++ /dev/null
@@ -1,35 +0,0 @@
-#!/bin/sh
-# Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-# file at the top-level directory of this distribution and at
-# http://rust-lang.org/COPYRIGHT.
-#
-# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-# option. This file may not be copied, modified, or distributed
-# except according to those terms.
-
-set -ex
-
-URL=https://dl.google.com/android/repository
-
-download_ndk() {
-    mkdir -p /android/ndk
-    cd /android/ndk
-    curl -O $URL/$1
-    unzip -q $1
-    rm $1
-    mv android-ndk-* ndk
-}
-
-make_standalone_toolchain() {
-    # See https://developer.android.com/ndk/guides/standalone_toolchain.html
-    python2.7 /android/ndk/ndk/build/tools/make_standalone_toolchain.py \
-        --install-dir /android/ndk/$1-$2 \
-        --arch $1 \
-        --api $2
-}
-
-remove_ndk() {
-    rm -rf /android/ndk/ndk
-}
diff --git a/src/ci/docker/arm-android/install-sdk.sh b/src/ci/docker/arm-android/install-sdk.sh
deleted file mode 100644 (file)
index 258fc47..0000000
+++ /dev/null
@@ -1,49 +0,0 @@
-#!/bin/sh
-# Copyright 2017 The Rust Project Developers. See the COPYRIGHT
-# file at the top-level directory of this distribution and at
-# http://rust-lang.org/COPYRIGHT.
-#
-# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-# option. This file may not be copied, modified, or distributed
-# except according to those terms.
-
-set -ex
-
-URL=https://dl.google.com/android/repository
-
-download_sdk() {
-    mkdir -p /android/sdk
-    cd /android/sdk
-    curl -O $URL/$1
-    unzip -q $1
-    rm -rf $1
-}
-
-download_sysimage() {
-    # See https://developer.android.com/studio/tools/help/android.html
-    abi=$1
-    api=$2
-
-    filter="platform-tools,android-$api"
-    filter="$filter,sys-img-$abi-android-$api"
-
-    # Keep printing yes to accept the licenses
-    while true; do echo yes; sleep 10; done | \
-        /android/sdk/tools/android update sdk -a --no-ui \
-            --filter "$filter"
-}
-
-create_avd() {
-    # See https://developer.android.com/studio/tools/help/android.html
-    abi=$1
-    api=$2
-
-    echo no | \
-        /android/sdk/tools/android create avd \
-            --name $abi-$api \
-            --target android-$api \
-            --abi $abi
-}
-
diff --git a/src/ci/docker/arm-android/start-emulator.sh b/src/ci/docker/arm-android/start-emulator.sh
deleted file mode 100755 (executable)
index cd3369d..0000000
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/bin/sh
-# Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-# file at the top-level directory of this distribution and at
-# http://rust-lang.org/COPYRIGHT.
-#
-# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-# option. This file may not be copied, modified, or distributed
-# except according to those terms.
-
-set -ex
-
-# Setting SHELL to a file instead of a symlink helps the android
-# emulator identify the system
-export SHELL=/bin/bash
-
-# Using the default qemu2 engine makes time::tests::since_epoch fail because
-# the emulator date is set to the unix epoch (in the armeabi-v7a-18 image). Using
-# the classic engine, the emulator starts with the current date and the tests run
-# fine. If another image is used, this needs to be evaluated again.
-nohup nohup emulator @armeabi-v7a-18 \
-    -engine classic -no-window -partition-size 2047 0<&- &>/dev/null &
-
-exec "$@"
index 801de69a63d544733f6be98befeab05e1dcea31e..03e0b78ba89b3d83a8c3e2b6444aa0c7fe54b25d 100644 (file)
@@ -31,7 +31,7 @@ WORKDIR /build
 # The `vexpress_config` config file was a previously generated config file for
 # the kernel. This file was generated by running `make vexpress_defconfig`
 # followed by `make menuconfig` and then enabling the IPv6 protocol page.
-COPY vexpress_config /build/.config
+COPY armhf-gnu/vexpress_config /build/.config
 RUN curl https://cdn.kernel.org/pub/linux/kernel/v4.x/linux-4.4.42.tar.xz | \
       tar xJf - && \
       cd /build/linux-4.4.42 && \
@@ -63,11 +63,11 @@ RUN curl http://cdimage.ubuntu.com/ubuntu-base/releases/16.04/release/ubuntu-bas
 
 # Copy over our init script, which starts up our test server and also a few
 # other misc tasks.
-COPY rcS rootfs/etc/init.d/rcS
+COPY armhf-gnu/rcS rootfs/etc/init.d/rcS
 RUN chmod +x rootfs/etc/init.d/rcS
 
 # Helper to quickly fill the entropy pool in the kernel.
-COPY addentropy.c /tmp/
+COPY armhf-gnu/addentropy.c /tmp/
 RUN arm-linux-gnueabihf-gcc addentropy.c -o rootfs/addentropy -static
 
 # TODO: What is this?!
index 30a699c3ba2141429b86d86cee41fa3e0d9742f6..7759d91e1bb635c41ba6b89a426616ebda65a293 100644 (file)
@@ -32,10 +32,10 @@ ENTRYPOINT ["/usr/bin/dumb-init", "--"]
 
 WORKDIR /tmp
 
-COPY build-rumprun.sh /tmp/
+COPY cross/build-rumprun.sh /tmp/
 RUN ./build-rumprun.sh
 
-COPY build-arm-musl.sh /tmp/
+COPY cross/build-arm-musl.sh /tmp/
 RUN ./build-arm-musl.sh
 
 # originally from
index e15876edbd8dbf91acc2e865fe92f705f8fda6bc..918d2911ae28f5a5ba301294068740568e8a29e4 100644 (file)
@@ -2,36 +2,30 @@ FROM ubuntu:16.04
 
 RUN apt-get update && \
     apt-get install -y --no-install-recommends \
+  ca-certificates \
+  cmake \
+  curl \
+  file \
   g++ \
+  git \
+  libssl-dev \
   make \
-  file \
-  curl \
-  ca-certificates \
+  pkg-config \
   python2.7 \
-  git \
-  cmake \
-  unzip \
   sudo \
-  xz-utils \
-  libssl-dev \
-  pkg-config
-
-RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
-    dpkg -i dumb-init_*.deb && \
-    rm dumb-init_*.deb
-
-RUN curl -o /usr/local/bin/sccache \
-      https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
-      chmod +x /usr/local/bin/sccache
+  unzip \
+  xz-utils
 
-ENTRYPOINT ["/usr/bin/dumb-init", "--"]
+# dumb-init
+COPY scripts/dumb-init.sh /scripts/
+RUN sh /scripts/dumb-init.sh
 
-COPY android-ndk.sh /
-RUN . /android-ndk.sh && \
-    download_ndk android-ndk-r13b-linux-x86_64.zip && \
-    make_standalone_toolchain arm64 21 && \
-    remove_ndk
+# ndk
+COPY scripts/android-ndk.sh /scripts/
+RUN . /scripts/android-ndk.sh && \
+    download_and_make_toolchain android-ndk-r13b-linux-x86_64.zip arm64 21
 
+# env
 ENV PATH=$PATH:/android/ndk/arm64-21/bin
 
 ENV DEP_Z_ROOT=/android/ndk/arm64-21/sysroot/usr/
@@ -47,3 +41,10 @@ ENV RUST_CONFIGURE_ARGS \
       --enable-cargo-openssl-static
 
 ENV SCRIPT python2.7 ../x.py dist --target $HOSTS --host $HOSTS
+
+# sccache
+COPY scripts/sccache.sh /scripts/
+RUN sh /scripts/sccache.sh
+
+# init
+ENTRYPOINT ["/usr/bin/dumb-init", "--"]
index 0d81e404b5c0da40e2ca70762dbbd97e8a330c58..aed82e6c13872d6a276aeddd974cb6e81654b769 100644 (file)
@@ -2,37 +2,36 @@ FROM ubuntu:16.04
 
 RUN apt-get update && \
     apt-get install -y --no-install-recommends \
+  ca-certificates \
+  cmake \
+  curl \
+  file \
   g++ \
+  git \
+  libssl-dev \
   make \
-  file \
-  curl \
-  ca-certificates \
+  pkg-config \
   python2.7 \
-  git \
-  cmake \
-  unzip \
   sudo \
-  xz-utils \
-  libssl-dev \
-  pkg-config
-
-RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
-    dpkg -i dumb-init_*.deb && \
-    rm dumb-init_*.deb
-
-RUN curl -o /usr/local/bin/sccache \
-      https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
-      chmod +x /usr/local/bin/sccache
+  unzip \
+  xz-utils
 
-ENTRYPOINT ["/usr/bin/dumb-init", "--"]
+# dumb-init
+COPY scripts/dumb-init.sh /scripts/
+RUN sh /scripts/dumb-init.sh
 
-COPY android-ndk.sh /
-RUN . /android-ndk.sh && \
+# ndk
+COPY scripts/android-ndk.sh /scripts/
+RUN . /scripts/android-ndk.sh && \
     download_ndk android-ndk-r13b-linux-x86_64.zip && \
     make_standalone_toolchain arm 9 && \
     make_standalone_toolchain arm 21 && \
     remove_ndk
 
+RUN chmod 777 /android/ndk && \
+    ln -s /android/ndk/arm-21 /android/ndk/arm
+
+# env
 ENV PATH=$PATH:/android/ndk/arm-9/bin
 
 ENV DEP_Z_ROOT=/android/ndk/arm-9/sysroot/usr/
@@ -54,12 +53,16 @@ ENV RUST_CONFIGURE_ARGS \
 # level 9), the default linker behavior is to generate an error, to allow the
 # build to finish we use --warn-unresolved-symbols. Note that the missing
 # symbols do not affect std, only the compiler (llvm) and cargo (openssl).
-RUN chmod 777 /android/ndk && \
-    ln -s /android/ndk/arm-21 /android/ndk/arm
-
 ENV SCRIPT \
   python2.7 ../x.py build src/llvm --host $HOSTS --target $HOSTS && \
   (export RUSTFLAGS="\"-C link-arg=-Wl,--warn-unresolved-symbols\""; \
     rm /android/ndk/arm && \
     ln -s /android/ndk/arm-9 /android/ndk/arm && \
     python2.7 ../x.py dist --host $HOSTS --target $HOSTS)
+
+# sccache
+COPY scripts/sccache.sh /scripts/
+RUN sh /scripts/sccache.sh
+
+# init
+ENTRYPOINT ["/usr/bin/dumb-init", "--"]
index 37930639b8a692e3dc766f99ff570f62594ff3fa..f012e869e7885fb4963e33f09bd616b9f64c7383 100644 (file)
@@ -2,37 +2,36 @@ FROM ubuntu:16.04
 
 RUN apt-get update && \
     apt-get install -y --no-install-recommends \
+  ca-certificates \
+  cmake \
+  curl \
+  file \
   g++ \
+  git \
+  libssl-dev \
   make \
-  file \
-  curl \
-  ca-certificates \
+  pkg-config \
   python2.7 \
-  git \
-  cmake \
-  unzip \
   sudo \
-  xz-utils \
-  libssl-dev \
-  pkg-config
-
-RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
-    dpkg -i dumb-init_*.deb && \
-    rm dumb-init_*.deb
-
-RUN curl -o /usr/local/bin/sccache \
-      https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
-      chmod +x /usr/local/bin/sccache
+  unzip \
+  xz-utils
 
-ENTRYPOINT ["/usr/bin/dumb-init", "--"]
+# dumb-init
+COPY scripts/dumb-init.sh /scripts/
+RUN sh /scripts/dumb-init.sh
 
-COPY android-ndk.sh /
-RUN . /android-ndk.sh && \
+# ndk
+COPY scripts/android-ndk.sh /scripts/
+RUN . /scripts/android-ndk.sh && \
     download_ndk android-ndk-r13b-linux-x86_64.zip && \
     make_standalone_toolchain x86 9 && \
     make_standalone_toolchain x86 21 && \
     remove_ndk
 
+RUN chmod 777 /android/ndk && \
+    ln -s /android/ndk/x86-21 /android/ndk/x86
+
+# env
 ENV PATH=$PATH:/android/ndk/x86-9/bin
 
 ENV DEP_Z_ROOT=/android/ndk/x86-9/sysroot/usr/
@@ -54,12 +53,16 @@ ENV RUST_CONFIGURE_ARGS \
 # level 9), the default linker behavior is to generate an error, to allow the
 # build to finish we use --warn-unresolved-symbols. Note that the missing
 # symbols do not affect std, only the compiler (llvm) and cargo (openssl).
-RUN chmod 777 /android/ndk && \
-    ln -s /android/ndk/x86-21 /android/ndk/x86
-
 ENV SCRIPT \
   python2.7 ../x.py build src/llvm --host $HOSTS --target $HOSTS && \
   (export RUSTFLAGS="\"-C link-arg=-Wl,--warn-unresolved-symbols\""; \
     rm /android/ndk/x86 && \
     ln -s /android/ndk/x86-9 /android/ndk/x86 && \
     python2.7 ../x.py dist --host $HOSTS --target $HOSTS)
+
+# sccache
+COPY scripts/sccache.sh /scripts/
+RUN sh /scripts/sccache.sh
+
+# init
+ENTRYPOINT ["/usr/bin/dumb-init", "--"]
index a642d8ed6ecc27d06a160d1c8bcc107c07f6fc38..0c586452840f97afa0e098c8d3c85516bc9d5953 100644 (file)
@@ -2,36 +2,30 @@ FROM ubuntu:16.04
 
 RUN apt-get update && \
     apt-get install -y --no-install-recommends \
+  ca-certificates \
+  cmake \
+  curl \
+  file \
   g++ \
+  git \
+  libssl-dev \
   make \
-  file \
-  curl \
-  ca-certificates \
+  pkg-config \
   python2.7 \
-  git \
-  cmake \
-  unzip \
   sudo \
-  xz-utils \
-  libssl-dev \
-  pkg-config
-
-RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
-    dpkg -i dumb-init_*.deb && \
-    rm dumb-init_*.deb
-
-RUN curl -o /usr/local/bin/sccache \
-      https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
-      chmod +x /usr/local/bin/sccache
+  unzip \
+  xz-utils
 
-ENTRYPOINT ["/usr/bin/dumb-init", "--"]
+# dumb-init
+COPY scripts/dumb-init.sh /scripts/
+RUN sh /scripts/dumb-init.sh
 
-COPY android-ndk.sh /
-RUN . /android-ndk.sh && \
-    download_ndk android-ndk-r13b-linux-x86_64.zip && \
-    make_standalone_toolchain x86_64 21 && \
-    remove_ndk
+# ndk
+COPY scripts/android-ndk.sh /scripts/
+RUN . /scripts/android-ndk.sh && \
+    download_and_make_toolchain android-ndk-r13b-linux-x86_64.zip x86_64 21
 
+# env
 ENV PATH=$PATH:/android/ndk/x86_64-21/bin
 
 ENV DEP_Z_ROOT=/android/ndk/x86_64-21/sysroot/usr/
@@ -47,3 +41,10 @@ ENV RUST_CONFIGURE_ARGS \
       --enable-cargo-openssl-static
 
 ENV SCRIPT python2.7 ../x.py dist --target $HOSTS --host $HOSTS
+
+# sccache
+COPY scripts/sccache.sh /scripts/
+RUN sh /scripts/sccache.sh
+
+# init
+ENTRYPOINT ["/usr/bin/dumb-init", "--"]
index c8257c05acdd40466e2f38ecddee4a819811f227..0134a5407932ad08906e86e786fcaa5da3a05700 100644 (file)
@@ -56,7 +56,7 @@ RUN mkdir /x-tools && chown rustbuild:rustbuild /x-tools
 USER rustbuild
 WORKDIR /tmp
 
-COPY aarch64-linux-gnu.config build-toolchains.sh /tmp/
+COPY dist-aarch64-linux/aarch64-linux-gnu.config dist-aarch64-linux/build-toolchains.sh /tmp/
 RUN ./build-toolchains.sh
 
 USER root
index 711c0ee5747000f4796bf855214ccf8753560064..31389dd148a8ae0cd23dc3f95f775226d4a5038f 100644 (file)
@@ -2,33 +2,27 @@ FROM ubuntu:16.04
 
 RUN apt-get update && \
     apt-get install -y --no-install-recommends \
+  ca-certificates \
+  cmake \
+  curl \
+  file \
   g++ \
+  git \
+  libssl-dev \
   make \
-  file \
-  curl \
-  ca-certificates \
+  pkg-config \
   python2.7 \
-  git \
-  cmake \
-  unzip \
   sudo \
-  xz-utils \
-  libssl-dev \
-  pkg-config
-
-RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
-    dpkg -i dumb-init_*.deb && \
-    rm dumb-init_*.deb
-
-RUN curl -o /usr/local/bin/sccache \
-      https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
-    chmod +x /usr/local/bin/sccache
+  unzip \
+  xz-utils
 
-ENTRYPOINT ["/usr/bin/dumb-init", "--"]
+# dumb-init
+COPY scripts/dumb-init.sh /scripts/
+RUN sh /scripts/dumb-init.sh
 
-# Install NDK
-COPY install-ndk.sh /tmp
-RUN . /tmp/install-ndk.sh && \
+# ndk
+COPY scripts/android-ndk.sh /scripts/
+RUN . /scripts/android-ndk.sh && \
     download_ndk android-ndk-r13b-linux-x86_64.zip && \
     make_standalone_toolchain arm 9 && \
     make_standalone_toolchain x86 9 && \
@@ -36,6 +30,7 @@ RUN . /tmp/install-ndk.sh && \
     make_standalone_toolchain x86_64 21 && \
     remove_ndk
 
+# env
 ENV TARGETS=arm-linux-androideabi
 ENV TARGETS=$TARGETS,armv7-linux-androideabi
 ENV TARGETS=$TARGETS,i686-linux-android
@@ -52,3 +47,10 @@ ENV RUST_CONFIGURE_ARGS \
       --x86_64-linux-android-ndk=/android/ndk/x86_64-21
 
 ENV SCRIPT python2.7 ../x.py dist --target $TARGETS
+
+# cache
+COPY scripts/sccache.sh /scripts/
+RUN sh /scripts/sccache.sh
+
+# init
+ENTRYPOINT ["/usr/bin/dumb-init", "--"]
diff --git a/src/ci/docker/dist-android/install-ndk.sh b/src/ci/docker/dist-android/install-ndk.sh
deleted file mode 100644 (file)
index 8081872..0000000
+++ /dev/null
@@ -1,35 +0,0 @@
-#!/bin/sh
-# Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-# file at the top-level directory of this distribution and at
-# http://rust-lang.org/COPYRIGHT.
-#
-# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-# option. This file may not be copied, modified, or distributed
-# except according to those terms.
-
-set -ex
-
-URL=https://dl.google.com/android/repository
-
-download_ndk() {
-    mkdir -p /android/ndk
-    cd /android/ndk
-    curl -O $URL/$1
-    unzip -q $1
-    rm $1
-    mv android-ndk-* ndk
-}
-
-make_standalone_toolchain() {
-    # See https://developer.android.com/ndk/guides/standalone_toolchain.html
-    python2.7 /android/ndk/ndk/build/tools/make_standalone_toolchain.py \
-        --install-dir /android/ndk/$1-$2 \
-        --arch $1 \
-        --api $2
-}
-
-remove_ndk() {
-    rm -rf /android/ndk/ndk
-}
index af2b58f7d6b7f5d5ab8d82a721e0d0259e2898d7..862818a7c918250b43f6d7ac20287cd829520a3e 100644 (file)
@@ -56,7 +56,7 @@ RUN mkdir /x-tools && chown rustbuild:rustbuild /x-tools
 USER rustbuild
 WORKDIR /tmp
 
-COPY arm-linux-gnueabi.config build-toolchains.sh /tmp/
+COPY dist-arm-linux/arm-linux-gnueabi.config dist-arm-linux/build-toolchains.sh /tmp/
 RUN ./build-toolchains.sh
 
 USER root
index 076bc50946cac66f8103260413b7cec93629b59b..7f1f91f844c771ba31c15fa22029ab6a5ddfecdf 100644 (file)
@@ -56,7 +56,7 @@ RUN mkdir /x-tools && chown rustbuild:rustbuild /x-tools
 USER rustbuild
 WORKDIR /tmp
 
-COPY arm-linux-gnueabihf.config build-toolchains.sh /tmp/
+COPY dist-armhf-linux/arm-linux-gnueabihf.config dist-armhf-linux/build-toolchains.sh /tmp/
 RUN ./build-toolchains.sh
 
 USER root
index 9367a5a6270cf257b12b5f52ba71b5054f38a560..030fd24ebcdd0c1daa5d69b724175d65c21c4933 100644 (file)
@@ -56,7 +56,7 @@ RUN mkdir /x-tools && chown rustbuild:rustbuild /x-tools
 USER rustbuild
 WORKDIR /tmp
 
-COPY build-toolchains.sh armv7-linux-gnueabihf.config /tmp/
+COPY dist-armv7-linux/build-toolchains.sh dist-armv7-linux/armv7-linux-gnueabihf.config /tmp/
 RUN ./build-toolchains.sh
 
 USER root
index 8699e0d87d7fafad7a5fa995578a2cdb8b7f001b..d1d9767d35e631c9c70f572b55a2ebc07f90a4ba 100644 (file)
@@ -21,7 +21,7 @@ RUN curl -L https://cmake.org/files/v3.8/cmake-3.8.0-rc1-Linux-x86_64.tar.gz | \
       tar xzf - -C /usr/local --strip-components=1
 
 WORKDIR /tmp
-COPY shared.sh build-toolchain.sh compiler-rt-dso-handle.patch /tmp/
+COPY dist-fuchsia/shared.sh dist-fuchsia/build-toolchain.sh dist-fuchsia/compiler-rt-dso-handle.patch /tmp/
 RUN /tmp/build-toolchain.sh
 
 RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
index 3e823339eaaf9890195627f45e592e5c7f2e42ff..805d238de1f9b5c78dcc4a9d7d281e07c38ca717 100644 (file)
@@ -17,7 +17,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
   pkg-config
 
 WORKDIR /build/
-COPY musl-libunwind-patch.patch build-musl.sh /build/
+COPY dist-i586-gnu-i686-musl/musl-libunwind-patch.patch dist-i586-gnu-i686-musl/build-musl.sh /build/
 RUN sh /build/build-musl.sh && rm -rf /build
 
 RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
index a1f36257f960ab022f4a0c3a0a9b7b26bda24fac..9c4d43bfa92bc600899cc8acd0881bd343f4b2cb 100644 (file)
@@ -16,7 +16,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
   libssl-dev \
   pkg-config
 
-COPY build-toolchain.sh /tmp/
+COPY dist-i686-freebsd/build-toolchain.sh /tmp/
 RUN /tmp/build-toolchain.sh i686
 
 RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
index 8335147de60a02514181afc3cc4146dc9d9da99d..a3c08e93ed158ce47175d05deef91f95bd5feaf5 100644 (file)
@@ -29,13 +29,13 @@ ENV PATH=/rustroot/bin:$PATH
 ENV LD_LIBRARY_PATH=/rustroot/lib64:/rustroot/lib
 ENV PKG_CONFIG_PATH=/rustroot/lib/pkgconfig
 WORKDIR /tmp
-COPY shared.sh build-binutils.sh /tmp/
+COPY dist-i686-linux/shared.sh dist-i686-linux/build-binutils.sh /tmp/
 
 # We need a build of openssl which supports SNI to download artifacts from
 # static.rust-lang.org. This'll be used to link into libcurl below (and used
 # later as well), so build a copy of OpenSSL with dynamic libraries into our
 # generic root.
-COPY build-openssl.sh /tmp/
+COPY dist-i686-linux/build-openssl.sh /tmp/
 RUN ./build-openssl.sh
 
 # The `curl` binary on CentOS doesn't support SNI which is needed for fetching
@@ -44,7 +44,7 @@ RUN ./build-openssl.sh
 #
 # Note that we also disable a bunch of optional features of curl that we don't
 # really need.
-COPY build-curl.sh /tmp/
+COPY dist-i686-linux/build-curl.sh /tmp/
 RUN ./build-curl.sh
 
 # binutils < 2.22 has a bug where the 32-bit executables it generates
@@ -54,26 +54,26 @@ RUN ./build-curl.sh
 RUN ./build-binutils.sh
 
 # Need a newer version of gcc than centos has to compile LLVM nowadays
-COPY build-gcc.sh /tmp/
+COPY dist-i686-linux/build-gcc.sh /tmp/
 RUN ./build-gcc.sh
 
 # CentOS 5.5 has Python 2.4 by default, but LLVM needs 2.7+
-COPY build-python.sh /tmp/
+COPY dist-i686-linux/build-python.sh /tmp/
 RUN ./build-python.sh
 
 # Apparently CentOS 5.5 doesn't have `git` in yum, but we're gonna need it for
 # cloning, so download and build it here.
-COPY build-git.sh /tmp/
+COPY dist-i686-linux/build-git.sh /tmp/
 RUN ./build-git.sh
 
 # libssh2 (a dependency of Cargo) requires cmake 2.8.11 or higher but CentOS
 # only has 2.6.4, so build our own
-COPY build-cmake.sh /tmp/
+COPY dist-i686-linux/build-cmake.sh /tmp/
 RUN ./build-cmake.sh
 
 # for sanitizers, we need kernel header files newer than the ones CentOS ships
 # with so we install newer ones here
-COPY build-headers.sh /tmp/
+COPY dist-i686-linux/build-headers.sh /tmp/
 RUN ./build-headers.sh
 
 RUN curl -Lo /rustroot/dumb-init \
index bff6504749e1a516e8b3e58b99f5ad59445a8c96..0074665f34f7ed54f0ddff57da78bfa71dfbf89e 100644 (file)
@@ -56,8 +56,8 @@ RUN mkdir /x-tools && chown rustbuild:rustbuild /x-tools
 USER rustbuild
 WORKDIR /tmp
 
-COPY patches/ /tmp/patches/
-COPY powerpc-linux-gnu.config build-powerpc-toolchain.sh /tmp/
+COPY dist-powerpc-linux/patches/ /tmp/patches/
+COPY dist-powerpc-linux/powerpc-linux-gnu.config dist-powerpc-linux/build-powerpc-toolchain.sh /tmp/
 RUN ./build-powerpc-toolchain.sh
 
 USER root
index 58b09fd0fa7316af8964eb4b57e8771915ac08e8..bd38ee0c111582a2f88dbf4f53d2ddb87b7eac57 100644 (file)
@@ -56,8 +56,8 @@ RUN mkdir /x-tools && chown rustbuild:rustbuild /x-tools
 USER rustbuild
 WORKDIR /tmp
 
-COPY patches/ /tmp/patches/
-COPY shared.sh powerpc64-linux-gnu.config build-powerpc64-toolchain.sh /tmp/
+COPY dist-powerpc64-linux/patches/ /tmp/patches/
+COPY dist-powerpc64-linux/shared.sh dist-powerpc64-linux/powerpc64-linux-gnu.config dist-powerpc64-linux/build-powerpc64-toolchain.sh /tmp/
 RUN ./build-powerpc64-toolchain.sh
 
 USER root
index 08f1d1d7ed5f7518083f0382f2807ba084e1615c..cbded156b4cbde53f26d4ce6508eaeeb9e74395b 100644 (file)
@@ -59,7 +59,7 @@ WORKDIR /tmp
 USER root
 
 RUN apt-get install -y --no-install-recommends rpm2cpio cpio
-COPY shared.sh build-powerpc64le-toolchain.sh /tmp/
+COPY dist-powerpc64le-linux/shared.sh dist-powerpc64le-linux/build-powerpc64le-toolchain.sh /tmp/
 RUN ./build-powerpc64le-toolchain.sh
 
 RUN curl -o /usr/local/bin/sccache \
index 5eb238fa887b660c7fdfe5b6b439566cd6cfdcf7..5c00287107aa3be399eda238a718a0050ec65b82 100644 (file)
@@ -56,8 +56,8 @@ RUN mkdir /x-tools && chown rustbuild:rustbuild /x-tools
 USER rustbuild
 WORKDIR /tmp
 
-COPY patches/ /tmp/patches/
-COPY s390x-linux-gnu.config build-s390x-toolchain.sh /tmp/
+COPY dist-s390x-linux/patches/ /tmp/patches/
+COPY dist-s390x-linux/s390x-linux-gnu.config dist-s390x-linux/build-s390x-toolchain.sh /tmp/
 RUN ./build-s390x-toolchain.sh
 
 USER root
index 0ac58468147d23b1f60194d6f00a8030ed8b38b2..a6c4eee5e812c5407e07fdce89ee73c4864e60de 100644 (file)
@@ -16,7 +16,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
   libssl-dev \
   pkg-config
 
-COPY build-toolchain.sh /tmp/
+COPY dist-x86_64-freebsd/build-toolchain.sh /tmp/
 RUN /tmp/build-toolchain.sh x86_64
 
 RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
index d688bb7f8a4c4b0b60656736bac9ad29a4aded6e..e2e42836dcdaf136786b7ad9b0194493d817cc83 100644 (file)
@@ -29,13 +29,13 @@ ENV PATH=/rustroot/bin:$PATH
 ENV LD_LIBRARY_PATH=/rustroot/lib64:/rustroot/lib
 ENV PKG_CONFIG_PATH=/rustroot/lib/pkgconfig
 WORKDIR /tmp
-COPY shared.sh build-binutils.sh /tmp/
+COPY dist-x86_64-linux/shared.sh dist-x86_64-linux/build-binutils.sh /tmp/
 
 # We need a build of openssl which supports SNI to download artifacts from
 # static.rust-lang.org. This'll be used to link into libcurl below (and used
 # later as well), so build a copy of OpenSSL with dynamic libraries into our
 # generic root.
-COPY build-openssl.sh /tmp/
+COPY dist-x86_64-linux/build-openssl.sh /tmp/
 RUN ./build-openssl.sh
 
 # The `curl` binary on CentOS doesn't support SNI which is needed for fetching
@@ -44,7 +44,7 @@ RUN ./build-openssl.sh
 #
 # Note that we also disable a bunch of optional features of curl that we don't
 # really need.
-COPY build-curl.sh /tmp/
+COPY dist-x86_64-linux/build-curl.sh /tmp/
 RUN ./build-curl.sh
 
 # binutils < 2.22 has a bug where the 32-bit executables it generates
@@ -54,26 +54,26 @@ RUN ./build-curl.sh
 RUN ./build-binutils.sh
 
 # Need a newer version of gcc than centos has to compile LLVM nowadays
-COPY build-gcc.sh /tmp/
+COPY dist-x86_64-linux/build-gcc.sh /tmp/
 RUN ./build-gcc.sh
 
 # CentOS 5.5 has Python 2.4 by default, but LLVM needs 2.7+
-COPY build-python.sh /tmp/
+COPY dist-x86_64-linux/build-python.sh /tmp/
 RUN ./build-python.sh
 
 # Apparently CentOS 5.5 doesn't have `git` in yum, but we're gonna need it for
 # cloning, so download and build it here.
-COPY build-git.sh /tmp/
+COPY dist-x86_64-linux/build-git.sh /tmp/
 RUN ./build-git.sh
 
 # libssh2 (a dependency of Cargo) requires cmake 2.8.11 or higher but CentOS
 # only has 2.6.4, so build our own
-COPY build-cmake.sh /tmp/
+COPY dist-x86_64-linux/build-cmake.sh /tmp/
 RUN ./build-cmake.sh
 
 # for sanitizers, we need kernel header files newer than the ones CentOS ships
 # with so we install newer ones here
-COPY build-headers.sh /tmp/
+COPY dist-x86_64-linux/build-headers.sh /tmp/
 RUN ./build-headers.sh
 
 RUN curl -Lo /rustroot/dumb-init \
index 87550641bc6d58528ecab2a9d9bbb9fcee0eaa60..2eea5ab1469728e210795b257e85ce33f2fb56db 100644 (file)
@@ -17,7 +17,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
   pkg-config
 
 WORKDIR /build/
-COPY build-musl.sh /build/
+COPY dist-x86_64-musl/build-musl.sh /build/
 RUN sh /build/build-musl.sh && rm -rf /build
 
 RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
index b6d9c221c1ce85bae3913c393ce08664c35ca92e..f76e6271f4c8c22eddfbb4eec38760f60570fac3 100644 (file)
@@ -56,7 +56,7 @@ RUN mkdir /x-tools && chown rustbuild:rustbuild /x-tools
 USER rustbuild
 WORKDIR /tmp
 
-COPY build-netbsd-toolchain.sh /tmp/
+COPY dist-x86_64-netbsd/build-netbsd-toolchain.sh /tmp/
 RUN ./build-netbsd-toolchain.sh
 
 USER root
index 09657d2f89211febe8a0ddc70c629dc889bbb062..0f0e5b69c32cf787d2a2fafb6dacbd370bce6c7a 100644 (file)
@@ -24,7 +24,7 @@ RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-ini
 ENTRYPOINT ["/usr/bin/dumb-init", "--"]
 
 WORKDIR /tmp
-COPY build-emscripten.sh /tmp/
+COPY emscripten/build-emscripten.sh /tmp/
 RUN ./build-emscripten.sh
 ENV PATH=$PATH:/tmp/emsdk_portable
 ENV PATH=$PATH:/tmp/emsdk_portable/clang/tag-e1.37.10/build_tag-e1.37.10_32/bin
index 6abbf0530afa69c6ebb333823f8b3d562495bb9f..bb9a860574dd2f175245cd6e8e97fc911d8787c7 100755 (executable)
@@ -26,7 +26,8 @@ if [ -f "$docker_dir/$image/Dockerfile" ]; then
       build \
       --rm \
       -t rust-ci \
-      "$docker_dir/$image"
+      -f "$docker_dir/$image/Dockerfile" \
+      "$docker_dir"
 elif [ -f "$docker_dir/disabled/$image/Dockerfile" ]; then
     if [ -n "$TRAVIS_OS_NAME" ]; then
         echo Cannot run disabled images on travis!
diff --git a/src/ci/docker/scripts/android-ndk.sh b/src/ci/docker/scripts/android-ndk.sh
new file mode 100644 (file)
index 0000000..c3d83c0
--- /dev/null
@@ -0,0 +1,40 @@
+# Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+# file at the top-level directory of this distribution and at
+# http://rust-lang.org/COPYRIGHT.
+#
+# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+# option. This file may not be copied, modified, or distributed
+# except according to those terms.
+
+set -ex
+
+URL=https://dl.google.com/android/repository
+
+download_ndk() {
+    mkdir -p /android/ndk
+    cd /android/ndk
+    curl -O $URL/$1
+    unzip -q $1
+    rm $1
+    mv android-ndk-* ndk
+}
+
+make_standalone_toolchain() {
+    # See https://developer.android.com/ndk/guides/standalone_toolchain.htm
+    python2.7 /android/ndk/ndk/build/tools/make_standalone_toolchain.py \
+        --install-dir /android/ndk/$1-$2 \
+        --arch $1 \
+        --api $2
+}
+
+remove_ndk() {
+    rm -rf /android/ndk/ndk
+}
+
+download_and_make_toolchain() {
+    download_ndk $1 && \
+    make_standalone_toolchain $2 $3 && \
+    remove_ndk
+}
diff --git a/src/ci/docker/scripts/android-sdk.sh b/src/ci/docker/scripts/android-sdk.sh
new file mode 100644 (file)
index 0000000..7d8110e
--- /dev/null
@@ -0,0 +1,53 @@
+# Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+# file at the top-level directory of this distribution and at
+# http://rust-lang.org/COPYRIGHT.
+#
+# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+# option. This file may not be copied, modified, or distributed
+# except according to those terms.
+
+set -ex
+
+URL=https://dl.google.com/android/repository
+
+download_sdk() {
+    mkdir -p /android/sdk
+    cd /android/sdk
+    curl -O $URL/$1
+    unzip -q $1
+    rm -rf $1
+}
+
+download_sysimage() {
+    # See https://developer.android.com/studio/tools/help/android.html
+    abi=$1
+    api=$2
+
+    filter="platform-tools,android-$api"
+    filter="$filter,sys-img-$abi-android-$api"
+
+    # Keep printing yes to accept the licenses
+    while true; do echo yes; sleep 10; done | \
+        /android/sdk/tools/android update sdk -a --no-ui \
+            --filter "$filter"
+}
+
+create_avd() {
+    # See https://developer.android.com/studio/tools/help/android.html
+    abi=$1
+    api=$2
+
+    echo no | \
+        /android/sdk/tools/android create avd \
+            --name $abi-$api \
+            --target android-$api \
+            --abi $abi
+}
+
+download_and_create_avd() {
+    download_sdk $1
+    download_sysimage $2 $3
+    create_avd $2 $3
+}
diff --git a/src/ci/docker/scripts/android-start-emulator.sh b/src/ci/docker/scripts/android-start-emulator.sh
new file mode 100755 (executable)
index 0000000..cd3369d
--- /dev/null
@@ -0,0 +1,25 @@
+#!/bin/sh
+# Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+# file at the top-level directory of this distribution and at
+# http://rust-lang.org/COPYRIGHT.
+#
+# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+# option. This file may not be copied, modified, or distributed
+# except according to those terms.
+
+set -ex
+
+# Setting SHELL to a file instead of a symlink helps the Android
+# emulator identify the system
+export SHELL=/bin/bash
+
+# Using the default qemu2 engine makes time::tests::since_epoch fail because
+# the emulator date is set to the Unix epoch (in the armeabi-v7a-18 image). With
+# the classic engine the emulator starts with the current date and the tests run
+# fine. If another image is used, this needs to be evaluated again.
+nohup nohup emulator @armeabi-v7a-18 \
+    -engine classic -no-window -partition-size 2047 0<&- &>/dev/null &
+
+exec "$@"
diff --git a/src/ci/docker/scripts/dumb-init.sh b/src/ci/docker/scripts/dumb-init.sh
new file mode 100644 (file)
index 0000000..839c390
--- /dev/null
@@ -0,0 +1,15 @@
+# Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+# file at the top-level directory of this distribution and at
+# http://rust-lang.org/COPYRIGHT.
+#
+# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+# option. This file may not be copied, modified, or distributed
+# except according to those terms.
+
+set -ex
+
+curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb
+dpkg -i dumb-init_*.deb
+rm dumb-init_*.deb
diff --git a/src/ci/docker/scripts/sccache.sh b/src/ci/docker/scripts/sccache.sh
new file mode 100644 (file)
index 0000000..7a2befa
--- /dev/null
@@ -0,0 +1,16 @@
+# Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+# file at the top-level directory of this distribution and at
+# http://rust-lang.org/COPYRIGHT.
+#
+# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+# option. This file may not be copied, modified, or distributed
+# except according to those terms.
+
+set -ex
+
+curl -o /usr/local/bin/sccache \
+  https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl
+
+chmod +x /usr/local/bin/sccache
index ad7de198561b3a12217ea2da76d796d9c7fc0ed3..97422981c53a00f7c3d6584d363443117f179fff 160000 (submodule)
@@ -1 +1 @@
-Subproject commit ad7de198561b3a12217ea2da76d796d9c7fc0ed3
+Subproject commit 97422981c53a00f7c3d6584d363443117f179fff
index 6b0de90d87dda15e323ef24cdf7ed873ac5cf4d3..f7a108dfa9e90b07821700c55d01f08a9adf005c 160000 (submodule)
@@ -1 +1 @@
-Subproject commit 6b0de90d87dda15e323ef24cdf7ed873ac5cf4d3
+Subproject commit f7a108dfa9e90b07821700c55d01f08a9adf005c
index 8f26e4d36cda2dd52d48073160c5e3073d197c1e..39f800591483bdbf05528244bf24281e76c992d2 100644 (file)
     - [unique](library-features/unique.md)
     - [unsize](library-features/unsize.md)
     - [utf8_error_error_len](library-features/utf8-error-error-len.md)
     - [vec_remove_item](library-features/vec-remove-item.md)
+    - [vec_resize_default](library-features/vec-resize-default.md)
     - [windows_c](library-features/windows-c.md)
     - [windows_handle](library-features/windows-handle.md)
index 30d22ca8208bff24c1b24f3ce7c3833e9a33609a..e8256469b145078be9889c75600c86e64081012c 100644 (file)
@@ -4,7 +4,7 @@ The tracking issue for this feature is: [#23121]
 
 [#23121]: https://github.com/rust-lang/rust/issues/23121
 
-See also [`slice_patterns`](slice-patterns.html).
+See also [`slice_patterns`](language-features/slice-patterns.html).
 
 ------------------------
 
index 5e68be633e7ab2d69aa066853c888b7ccf5c3467..8deb8f462562063f67cac3e1bb1ac85621444ba7 100644 (file)
@@ -190,4 +190,4 @@ constraints, etc.
 [llvm-docs]: http://llvm.org/docs/LangRef.html#inline-assembler-expressions
 
 If you need more power and don't mind losing some of the niceties of
-`asm!`, check out [global_asm](global_asm.html).
+`asm!`, check out [global_asm](language-features/global_asm.html).
index 86346364a71356caf4d19ad45f82c36d05d4d5c3..0896627acae1b3203a39d1463bcae4d6e470fa19 100644 (file)
@@ -4,7 +4,7 @@ The tracking issue for this feature is: [#29641]
 
 [#29641]: https://github.com/rust-lang/rust/issues/29641
 
-See also [`box_syntax`](box-syntax.html)
+See also [`box_syntax`](language-features/box-syntax.html)
 
 ------------------------
 
index 47aade0d04563016192fac4f0d8a8c0a6f930a79..50e59231a4df22d19608d91d470f54fceb645a83 100644 (file)
@@ -4,7 +4,7 @@ The tracking issue for this feature is: [#27779]
 
 [#27779]: https://github.com/rust-lang/rust/issues/27779
 
-See also [`box_patterns`](box-patterns.html)
+See also [`box_patterns`](language-features/box-patterns.html)
 
 ------------------------
 
index 44921aa309f8444218554757121cc1b2a9b5967e..f1ef74a63b513acedd4ff96396812640b451ac9b 100644 (file)
@@ -74,5 +74,5 @@ usages and placed the larger, single usage in the crate root.
 
 If you don't need quite as much power and flexibility as
 `global_asm!` provides, and you don't mind restricting your inline
-assembly to `fn` bodies only, you might try the [asm](asm.html)
-feature instead.
+assembly to `fn` bodies only, you might try the
+[asm](language-features/asm.html) feature instead.
index 54d6e62ce4c526b088fbfb2d22940bddd35d4b57..e8fefe3b73344b4376cfce092dc8b9d37cf21bff 100644 (file)
@@ -4,7 +4,80 @@ The tracking issue for this feature is: [#37339]
 
 [#37339]: https://github.com/rust-lang/rust/issues/37339
 
+Documentation to be appended to section G of the book.
+
 ------------------------
 
+### Loops as expressions
+
+Like most things in Rust, loops are expressions and have a value: normally `()`, unless the loop
+never exits.
+A `loop` can instead evaluate to a useful value via *break with value*:
+
+```rust
+#![feature(loop_break_value)]
+
+// Find the first square number over 1000:
+let mut n = 1;
+let square = loop {
+    if n * n > 1000 {
+        break n * n;
+    }
+    n += 1;
+};
+```
+
+The type that the loop evaluates to may be specified externally:
+
+```rust
+#![feature(loop_break_value)]
+
+// Declare that the value returned is an unsigned 64-bit integer:
+let n: u64 = loop {
+    break 1;
+};
+```
+
+It is an error if the types do not agree, either between a `break` value and an external requirement,
+or between multiple `break` values:
+
+```no_compile
+#![feature(loop_break_value)]
+
+loop {
+    if true {
+        break 1u32;
+    } else {
+        break 0u8;  // error: types do not agree
+    }
+};
+
+let n: i32 = loop {
+    break 0u32; // error: type does not agree with external requirement
+};
+```
+
+#### Break: label, value
+
+Four forms of `break` are available, where EXPR is some expression which evaluates to a value:
+
+1.  `break;`
+2.  `break 'label;`
+3.  `break EXPR;`
+4.  `break 'label EXPR;`
+
+When no value is given, the value `()` is assumed; thus `break;` is equivalent to `break ();`.
+
+Using a label allows returning a value from an inner loop:
 
+```rust
+#![feature(loop_break_value)]
 
+let result = 'outer: loop {
+    for n in 1..10 {
+        if n > 4 {
+            break 'outer n;
+        }
+    }
+};
+```
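A minimal editorial sketch of the plain `break;` and `break EXPR;` forms described above (it assumes nothing beyond the behaviour stated in the text; the labelled form is already covered by the example just above):

```rust
#![feature(loop_break_value)]

// `break;` is shorthand for `break ();`, so this loop evaluates to `()`.
let unit: () = loop {
    break;
};
assert_eq!(unit, ());

// `break EXPR;` makes the loop evaluate to EXPR.
let answer = loop {
    break 6 * 7;
};
assert_eq!(answer, 42);
```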
index ca3738bd93f831dbb5b5955f2d227468f121c7f9..b16e2ac2d221c97941e4f4ba0c01b31781335bed 100644 (file)
@@ -8,6 +8,6 @@ This feature is part of "compiler plugins." It will often be used with the
 [`plugin`] and `rustc_private` features as well. For more details, see
 their docs.
 
-[`plugin`]: plugin.html
+[`plugin`]: language-features/plugin.html
 
 ------------------------
index 3a1872e18ddb8a6968594016964fa4f8a5569522..4b8603e3c445045147c42de687955d1582c44e7d 100644 (file)
@@ -8,7 +8,7 @@ The tracking issue for this feature is: [#29597]
 This feature is part of "compiler plugins." It will often be used with the
 [`plugin_registrar`] and `rustc_private` features.
 
-[`plugin_registrar`]: plugin-registrar.html
+[`plugin_registrar`]: language-features/plugin-registrar.html
 
 ------------------------
 
index f8b53bd5a2fd398a9119c3b1506dff84550f4db3..1bd8c41629eea01712d001f940d2e797ee47138f 100644 (file)
@@ -6,5 +6,236 @@ The tracking issue for this feature is: [#38356]
 
 ------------------------
 
+This feature flag guards the new procedural macro features laid out by [RFC 1566]. Alongside the now-stable
+[custom derives], they provide stabilizable alternatives to the compiler plugin API (which requires the use of
+perma-unstable internal APIs) for programmatically modifying Rust code at compile time.
 
+The two new procedural macro kinds are:
+* Function-like procedural macros, which are invoked like regular declarative macros, and
 
+* Attribute-like procedural macros, which can be applied to any item that built-in attributes can
+be applied to, and which can also take arguments in their invocation.
+
+Additionally, this feature flag implicitly enables the [`use_extern_macros`](language-features/use-extern-macros.html) feature,
+which allows macros to be imported like any other item with `use` statements, as compared to 
+applying `#[macro_use]` to an `extern crate` declaration. It is important to note that procedural macros may
+**only** be imported in this manner, and will throw an error otherwise.
+
+You **must** declare the `proc_macro` feature both in the crate declaring these new procedural macro kinds and
+in any crates that use them.
+
+### Common Concepts
+
+As with custom derives, procedural macros may only be declared in crates of the `proc-macro` type, and must be public
+functions. No other public items may be declared in `proc-macro` crates, but private items are fine.
+
+To declare your crate as a `proc-macro` crate, simply add:
+
+```toml
+[lib]
+proc-macro = true
+```
+
+to your `Cargo.toml`. 
+
+Unlike custom derives, however, the name of the function implementing the procedural macro is used directly as the 
+procedural macro's name, so choose carefully.
+
+Additionally, both new kinds of procedural macros return a `TokenStream` which *wholly* replaces the original 
+invocation and its input.
+
+#### Importing
+
+As referenced above, the new procedural macros are not meant to be imported via `#[macro_use]` and will throw an 
+error if they are. Instead, they are meant to be imported like any other item in Rust, with `use` statements:
+
+```rust,ignore
+#![feature(proc_macro)]
+
+// Where `my_proc_macros` is some crate of the `proc-macro` type
+extern crate my_proc_macros;
+
+// And declares a `#[proc_macro] pub fn my_bang_macro()` at its root.
+use my_proc_macros::my_bang_macro;
+
+fn main() {
+    println!("{}", my_bang_macro!());
+}
+```
+
+#### Error Reporting
+
+Any panics in a procedural macro implementation will be caught by the compiler and turned into an error message pointing 
+to the problematic invocation. Thus, it is important to make your panic messages as informative as possible: use 
+`Option::expect` instead of `Option::unwrap` and `Result::expect` instead of `Result::unwrap`, and inform the user of 
+the error condition as unambiguously as you can.
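A minimal sketch of that advice, using a hypothetical `require_ident` macro (the name and the check are illustrative only, not part of this patch); the `assert!` message is what the compiler surfaces at the offending invocation:

```rust,ignore
#![feature(proc_macro)]

extern crate proc_macro;

use proc_macro::TokenStream;

#[proc_macro]
pub fn require_ident(input: TokenStream) -> TokenStream {
    // Panic with a descriptive message instead of a bare `unwrap()` failure,
    // so the resulting compiler error points the user at the real problem.
    assert!(!input.to_string().trim().is_empty(),
            "`require_ident!` expects an identifier, but the invocation was empty");
    input
}
```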
+
+#### `TokenStream`
+
+The `proc_macro::TokenStream` type is hardcoded into the signatures of procedural macro functions for both input and 
+output. It is a wrapper around the compiler's internal representation for a given chunk of Rust code.
+
+### Function-like Procedural Macros
+
+These are procedural macros that are invoked like regular declarative macros. They are declared as public functions in
+crates of the `proc-macro` type, using the `#[proc_macro]` attribute. The name of the declared function becomes the
+name of the macro as it is to be imported and used. The function must be of the kind `fn(TokenStream) -> TokenStream` 
+where the sole argument is the input to the macro and the return type is the macro's output.
+
+This kind of macro can expand to anything that is valid for the context it is invoked in, including expressions and
+statements, as well as items.
+
+**Note**: invocations of this kind of macro require a wrapping `[]`, `{}` or `()` like regular macros, but these do not 
+appear in the input, only the tokens between them. The tokens between the braces do not need to be valid Rust syntax.
+
+<span class="filename">my_macro_crate/src/lib.rs</span>
+
+```rust,ignore
+#![feature(proc_macro)]
+
+// This is always necessary to get the `TokenStream` typedef.
+extern crate proc_macro;
+
+use proc_macro::TokenStream;
+
+#[proc_macro]
+pub fn say_hello(_input: TokenStream) -> TokenStream {
+    // This macro will accept any input because it ignores it. 
+    // To enforce correctness in macros which don't take input,
+    // you may want to add `assert!(_input.to_string().is_empty());`.
+    "println!(\"Hello, world!\")".parse().unwrap()
+}
+```
+
+<span class="filename">my_macro_user/Cargo.toml</span>
+
+```toml
+[dependencies]
+my_macro_crate = { path = "<relative path to my_macro_crate>" }
+```
+
+<span class="filename">my_macro_user/src/lib.rs</span>
+
+```rust,ignore
+#![feature(proc_macro)]
+
+extern crate my_macro_crate;
+
+use my_macro_crate::say_hello;
+
+fn main() {
+    say_hello!();
+}
+```
+
+As expected, this prints `Hello, world!`.
+
+### Attribute-like Procedural Macros
+
+These are arguably the most powerful flavor of procedural macro as they can be applied anywhere attributes are allowed. 
+
+They are declared as public functions in crates of the `proc-macro` type, using the `#[proc_macro_attribute]` attribute. 
+The name of the function becomes the name of the attribute as it is to be imported and used. The function must be of the 
+kind `fn(TokenStream, TokenStream) -> TokenStream` where:
+
+The first argument represents any metadata for the attribute (see [the reference chapter on attributes][refr-attr]). 
+Only the metadata itself will appear in this argument, for example:
+ * `#[my_macro]` will get an empty string.
+ * `#[my_macro = "string"]` will get `= "string"`.
+ * `#[my_macro(ident)]` will get `(ident)`.
+ * etc.
+
+The second argument is the item that the attribute is applied to. It can be a function, a type definition,
+an impl block, an `extern` block, or a module—attribute invocations can take the inner form (`#![my_attr]`) 
+or outer form (`#[my_attr]`).
+
+The return type is the output of the macro which *wholly* replaces the item it was applied to. Thus, if your intention
+is to merely modify an item, it *must* be copied to the output. The output must be an item; expressions, statements
+and bare blocks are not allowed.
+
+There is no restriction on how many items an attribute-like procedural macro can emit as long as they are valid in 
+the given context.
+
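As a minimal sketch of that rule (a hypothetical no-op attribute, not taken from the patch): returning `input` unchanged keeps the item as it was, whereas returning an empty `TokenStream` would remove the item from the crate.

```rust,ignore
#![feature(proc_macro)]

extern crate proc_macro;

use proc_macro::TokenStream;

/// Hypothetical attribute that leaves its item untouched.
#[proc_macro_attribute]
pub fn keep_as_is(_args: TokenStream, input: TokenStream) -> TokenStream {
    // The output *wholly* replaces the annotated item, so hand it back verbatim.
    input
}
```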
+<span class="filename">my_macro_crate/src/lib.rs</span>
+
+```rust,ignore
+#![feature(proc_macro)]
+
+extern crate proc_macro;
+
+use proc_macro::TokenStream;
+
+/// Adds a `/// ### Panics` docstring to the end of the input's documentation
+///
+/// Does not assert that its receiver is a function or method.
+#[proc_macro_attribute]
+pub fn panics_note(args: TokenStream, input: TokenStream) -> TokenStream {
+    let args = args.to_string();
+    let mut input = input.to_string();
+
+    assert!(args.starts_with("= \""), "`#[panics_note]` requires an argument of the form \
+                                       `#[panics_note = \"panic note here\"]`");
+
+    // Get just the bare note string
+    let panics_note = args.trim_matches(&['=', ' ', '"'][..]);
+
+    // The input will include all docstrings regardless of where the attribute is placed,
+    // so we need to find the last index before the start of the item
+    let insert_idx = idx_after_last_docstring(&input);
+
+    // And insert our `### Panics` note there so it always appears at the end of an item's docs
+    input.insert_str(insert_idx, &format!("/// # Panics \n/// {}\n", panics_note));
+
+    input.parse().unwrap()
+}
+
+// `proc-macro` crates can still contain any kind of private item
+fn idx_after_last_docstring(input: &str) -> usize {
+    // Skip docstring lines to find the start of the item proper
+    input.lines().skip_while(|line| line.trim_left().starts_with("///")).next()
+        // Find the index of the first non-docstring line in the input
+        // Note: assumes this exact line is unique in the input
+        .and_then(|line_after| input.find(line_after))
+        // No docstrings in the input
+        .unwrap_or(0)
+}
+```
+
+<span class="filename">my_macro_user/Cargo.toml</span>
+
+```toml
+[dependencies]
+my_macro_crate = { path = "<relative path to my_macro_crate>" }
+```
+
+<span class="filename">my_macro_user/src/lib.rs</span>
+
+```rust,ignore
+#![feature(proc_macro)]
+
+extern crate my_macro_crate;
+
+use my_macro_crate::panics_note;
+
+/// Do the `foo` thing.
+#[panics_note = "Always."]
+pub fn foo() {
+    panic!()
+}
+```
+
+Then the rendered documentation for `pub fn foo` will look like this:
+
+> `pub fn foo()`
+> 
+> ----
+> Do the `foo` thing.
+> # Panics
+> Always.
+
+[RFC 1566]: https://github.com/rust-lang/rfcs/blob/master/text/1566-proc-macros.md
+[custom derives]: https://doc.rust-lang.org/book/procedural-macros.html
+[rust-lang/rust#41430]: https://github.com/rust-lang/rust/issues/41430
+[refr-attr]: https://doc.rust-lang.org/reference/attributes.html
index 1e9e1eaafda46dade9f4c92cc49035259251d6e0..69857297582da4068b9c3c464d818657c7ce6877 100644 (file)
@@ -4,7 +4,8 @@ The tracking issue for this feature is: [#23121]
 
 [#23121]: https://github.com/rust-lang/rust/issues/23121
 
-See also [`advanced_slice_patterns`](advanced-slice-patterns.html).
+See also
+[`advanced_slice_patterns`](language-features/advanced-slice-patterns.html).
 
 ------------------------
 
index 9bffa2ff99bf3d0db7fdcc0455d610fe9b187115..18ff838dd32b961743f14b03a95d13d0d14790f9 100644 (file)
@@ -4,7 +4,7 @@ The tracking issue for this feature is: [#33082]
 
 [#33082]: https://github.com/rust-lang/rust/issues/33082
 
-See also [`alloc_system`](alloc-system.html).
+See also [`alloc_system`](library-features/alloc-system.html).
 
 ------------------------
 
index 6fa89179d8e1150c54e4367b394ae3f60d1214f3..1d261db6ba1b3f810a08aa3892837c99cdeaf280 100644 (file)
@@ -4,7 +4,7 @@ The tracking issue for this feature is: [#33082]
 
 [#33082]: https://github.com/rust-lang/rust/issues/33082
 
-See also [`alloc_jemalloc`](alloc-jemalloc.html).
+See also [`alloc_jemalloc`](library-features/alloc-jemalloc.html).
 
 ------------------------
 
diff --git a/src/doc/unstable-book/src/library-features/vec-resize-default.md b/src/doc/unstable-book/src/library-features/vec-resize-default.md
new file mode 100644 (file)
index 0000000..5803d32
--- /dev/null
@@ -0,0 +1,7 @@
+# `vec_resize_default`
+
+The tracking issue for this feature is: [#41758]
+
+[#41758]: https://github.com/rust-lang/rust/issues/41758
+
+------------------------
index 7ec5c29de6b4be49918d8bc5678f51a0ef665d29..1cf713290d8e8b7e7ea65b6b9af21f961a73c337 100644 (file)
@@ -1220,11 +1220,14 @@ pub fn split_off(&mut self, at: usize) -> Self {
 }
 
 impl<T: Clone> Vec<T> {
-    /// Resizes the `Vec` in-place so that `len()` is equal to `new_len`.
+    /// Resizes the `Vec` in-place so that `len` is equal to `new_len`.
     ///
-    /// If `new_len` is greater than `len()`, the `Vec` is extended by the
+    /// If `new_len` is greater than `len`, the `Vec` is extended by the
     /// difference, with each additional slot filled with `value`.
-    /// If `new_len` is less than `len()`, the `Vec` is simply truncated.
+    /// If `new_len` is less than `len`, the `Vec` is simply truncated.
+    ///
+    /// This method requires `Clone` to clone the passed value. If you'd
+    /// rather create a value with `Default` instead, see [`resize_default`].
     ///
     /// # Examples
     ///
@@ -1237,19 +1240,100 @@ impl<T: Clone> Vec<T> {
     /// vec.resize(2, 0);
     /// assert_eq!(vec, [1, 2]);
     /// ```
+    ///
+    /// [`resize_default`]: #method.resize_default
     #[stable(feature = "vec_resize", since = "1.5.0")]
     pub fn resize(&mut self, new_len: usize, value: T) {
         let len = self.len();
 
         if new_len > len {
-            self.extend_with_element(new_len - len, value);
+            self.extend_with(new_len - len, ExtendElement(value))
+        } else {
+            self.truncate(new_len);
+        }
+    }
+
+    /// Clones and appends all elements in a slice to the `Vec`.
+    ///
+    /// Iterates over the slice `other`, clones each element, and then appends
+    /// it to this `Vec`. The `other` vector is traversed in-order.
+    ///
+    /// Note that this function is the same as `extend` except that it is
+    /// specialized to work with slices instead. If and when Rust gets
+    /// specialization this function will likely be deprecated (but still
+    /// available).
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// let mut vec = vec![1];
+    /// vec.extend_from_slice(&[2, 3, 4]);
+    /// assert_eq!(vec, [1, 2, 3, 4]);
+    /// ```
+    #[stable(feature = "vec_extend_from_slice", since = "1.6.0")]
+    pub fn extend_from_slice(&mut self, other: &[T]) {
+        self.spec_extend(other.iter())
+    }
+}
+
+impl<T: Default> Vec<T> {
+    /// Resizes the `Vec` in-place so that `len` is equal to `new_len`.
+    ///
+    /// If `new_len` is greater than `len`, the `Vec` is extended by the
+    /// difference, with each additional slot filled with `Default::default()`.
+    /// If `new_len` is less than `len`, the `Vec` is simply truncated.
+    ///
+    /// This method uses `Default` to create new values on every push. If
+    /// you'd rather `Clone` a given value, use [`resize`].
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(vec_resize_default)]
+    ///
+    /// let mut vec = vec![1, 2, 3];
+    /// vec.resize_default(5);
+    /// assert_eq!(vec, [1, 2, 3, 0, 0]);
+    ///
+    /// let mut vec = vec![1, 2, 3, 4];
+    /// vec.resize_default(2);
+    /// assert_eq!(vec, [1, 2]);
+    /// ```
+    ///
+    /// [`resize`]: #method.resize
+    #[unstable(feature = "vec_resize_default", issue = "41758")]
+    pub fn resize_default(&mut self, new_len: usize) {
+        let len = self.len();
+
+        if new_len > len {
+            self.extend_with(new_len - len, ExtendDefault);
         } else {
             self.truncate(new_len);
         }
     }
+}
 
-    /// Extend the vector by `n` additional clones of `value`.
-    fn extend_with_element(&mut self, n: usize, value: T) {
+// This code generalises `extend_with_{element,default}`.
+trait ExtendWith<T> {
+    fn next(&self) -> T;
+    fn last(self) -> T;
+}
+
+struct ExtendElement<T>(T);
+impl<T: Clone> ExtendWith<T> for ExtendElement<T> {
+    fn next(&self) -> T { self.0.clone() }
+    fn last(self) -> T { self.0 }
+}
+
+struct ExtendDefault;
+impl<T: Default> ExtendWith<T> for ExtendDefault {
+    fn next(&self) -> T { Default::default() }
+    fn last(self) -> T { Default::default() }
+}
+impl<T> Vec<T> {
+    /// Extend the vector by `n` values, using the given generator.
+    fn extend_with<E: ExtendWith<T>>(&mut self, n: usize, value: E) {
         self.reserve(n);
 
         unsafe {
@@ -1261,43 +1345,21 @@ fn extend_with_element(&mut self, n: usize, value: T) {
 
             // Write all elements except the last one
             for _ in 1..n {
-                ptr::write(ptr, value.clone());
+                ptr::write(ptr, value.next());
                 ptr = ptr.offset(1);
-                // Increment the length in every step in case clone() panics
+                // Increment the length in every step in case next() panics
                 local_len.increment_len(1);
             }
 
             if n > 0 {
                 // We can write the last element directly without cloning needlessly
-                ptr::write(ptr, value);
+                ptr::write(ptr, value.last());
                 local_len.increment_len(1);
             }
 
             // len set by scope guard
         }
     }
-
-    /// Clones and appends all elements in a slice to the `Vec`.
-    ///
-    /// Iterates over the slice `other`, clones each element, and then appends
-    /// it to this `Vec`. The `other` vector is traversed in-order.
-    ///
-    /// Note that this function is same as `extend` except that it is
-    /// specialized to work with slices instead. If and when Rust gets
-    /// specialization this function will likely be deprecated (but still
-    /// available).
-    ///
-    /// # Examples
-    ///
-    /// ```
-    /// let mut vec = vec![1];
-    /// vec.extend_from_slice(&[2, 3, 4]);
-    /// assert_eq!(vec, [1, 2, 3, 4]);
-    /// ```
-    #[stable(feature = "vec_extend_from_slice", since = "1.6.0")]
-    pub fn extend_from_slice(&mut self, other: &[T]) {
-        self.spec_extend(other.iter())
-    }
 }
 
 // Set the length of the vec when the `SetLenOnDrop` value goes out of scope.
@@ -1389,7 +1451,7 @@ trait SpecFromElem: Sized {
 impl<T: Clone> SpecFromElem for T {
     default fn from_elem(elem: Self, n: usize) -> Vec<Self> {
         let mut v = Vec::with_capacity(n);
-        v.extend_with_element(n, elem);
+        v.extend_with(n, ExtendElement(elem));
         v
     }
 }
@@ -1424,7 +1486,7 @@ fn from_elem(elem: $t, n: usize) -> Vec<$t> {
                     }
                 }
                 let mut v = Vec::with_capacity(n);
-                v.extend_with_element(n, elem);
+                v.extend_with(n, ExtendElement(elem));
                 v
             }
         }
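To make the difference between the two resize flavours concrete, here is a small usage sketch; the behaviour is taken from the docstrings in this diff, and the unstable `vec_resize_default` feature gate is required for `resize_default`:

```rust
#![feature(vec_resize_default)]

fn main() {
    // `resize` clones an explicit value into the new slots...
    let mut padded = vec!["x".to_string()];
    padded.resize(3, "pad".to_string());
    assert_eq!(padded, ["x", "pad", "pad"]);

    // ...while `resize_default` fills them with `Default::default()`.
    let mut counts = vec![1u32, 2, 3];
    counts.resize_default(5);
    assert_eq!(counts, [1, 2, 3, 0, 0]);

    // Both simply truncate when the requested length is smaller.
    counts.resize_default(2);
    assert_eq!(counts, [1, 2]);
}
```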
index 87def375b202b17e2e38c85cc62453e911b7992d..4825c2aa132640abc14c27cc5e2f7ce040ed3928 100644 (file)
@@ -21,7 +21,7 @@ fn float_to_decimal_common_exact<T>(fmt: &mut Formatter, num: &T,
 {
     unsafe {
         let mut buf: [u8; 1024] = mem::uninitialized(); // enough for f32 and f64
-        let mut parts: [flt2dec::Part; 5] = mem::uninitialized();
+        let mut parts: [flt2dec::Part; 4] = mem::uninitialized();
         let formatted = flt2dec::to_exact_fixed_str(flt2dec::strategy::grisu::format_exact,
                                                     *num, sign, precision,
                                                     false, &mut buf, &mut parts);
@@ -39,7 +39,7 @@ fn float_to_decimal_common_shortest<T>(fmt: &mut Formatter,
     unsafe {
         // enough for f32 and f64
         let mut buf: [u8; flt2dec::MAX_SIG_DIGITS] = mem::uninitialized();
-        let mut parts: [flt2dec::Part; 5] = mem::uninitialized();
+        let mut parts: [flt2dec::Part; 4] = mem::uninitialized();
         let formatted = flt2dec::to_shortest_str(flt2dec::strategy::grisu::format_shortest,
                                                  *num, sign, 0, false, &mut buf, &mut parts);
         fmt.pad_formatted_parts(&formatted)
@@ -75,7 +75,7 @@ fn float_to_exponential_common_exact<T>(fmt: &mut Formatter, num: &T,
 {
     unsafe {
         let mut buf: [u8; 1024] = mem::uninitialized(); // enough for f32 and f64
-        let mut parts: [flt2dec::Part; 7] = mem::uninitialized();
+        let mut parts: [flt2dec::Part; 6] = mem::uninitialized();
         let formatted = flt2dec::to_exact_exp_str(flt2dec::strategy::grisu::format_exact,
                                                   *num, sign, precision,
                                                   upper, &mut buf, &mut parts);
@@ -94,7 +94,7 @@ fn float_to_exponential_common_shortest<T>(fmt: &mut Formatter,
     unsafe {
         // enough for f32 and f64
         let mut buf: [u8; flt2dec::MAX_SIG_DIGITS] = mem::uninitialized();
-        let mut parts: [flt2dec::Part; 7] = mem::uninitialized();
+        let mut parts: [flt2dec::Part; 6] = mem::uninitialized();
         let formatted = flt2dec::to_shortest_exp_str(flt2dec::strategy::grisu::format_shortest,
                                                      *num, sign, (0, 0), upper,
                                                      &mut buf, &mut parts);
index 5123e42df61ca8e013943c5d2be7f1f561d57299..74b9e7bf37d5110a754d51a532d2eef5e9ea8d00 100644 (file)
@@ -410,8 +410,8 @@ fn determine_sign(sign: Sign, decoded: &FullDecoded, negative: bool) -> &'static
 /// it will only print given digits and nothing else.
 ///
 /// The byte buffer should be at least `MAX_SIG_DIGITS` bytes long.
-/// There should be at least 5 parts available, due to the worst case like
-/// `[+][0.][0000][45][0000]` with `frac_digits = 10`.
+/// There should be at least 4 parts available, due to the worst case like
+/// `[+][0.][0000][2][0000]` with `frac_digits = 10`.
 pub fn to_shortest_str<'a, T, F>(mut format_shortest: F, v: T,
                                  sign: Sign, frac_digits: usize, _upper: bool,
                                  buf: &'a mut [u8], parts: &'a mut [Part<'a>]) -> Formatted<'a>
@@ -465,8 +465,8 @@ pub fn to_shortest_str<'a, T, F>(mut format_shortest: F, v: T,
 /// cannot be in this range, avoiding any confusion.
 ///
 /// The byte buffer should be at least `MAX_SIG_DIGITS` bytes long.
-/// There should be at least 7 parts available, due to the worst case like
-/// `[+][1][.][2345][e][-][67]`.
+/// There should be at least 6 parts available, due to the worst case like
+/// `[+][1][.][2345][e][-][6]`.
 pub fn to_shortest_exp_str<'a, T, F>(mut format_shortest: F, v: T,
                                      sign: Sign, dec_bounds: (i16, i16), upper: bool,
                                      buf: &'a mut [u8], parts: &'a mut [Part<'a>]) -> Formatted<'a>
@@ -544,8 +544,8 @@ fn estimate_max_buf_len(exp: i16) -> usize {
 /// The byte buffer should be at least `ndigits` bytes long unless `ndigits` is
 /// so large that only the fixed number of digits will be ever written.
 /// (The tipping point for `f64` is about 800, so 1000 bytes should be enough.)
-/// There should be at least 7 parts available, due to the worst case like
-/// `[+][1][.][2345][e][-][67]`.
+/// There should be at least 6 parts available, due to the worst case like
+/// `[+][1][.][2345][e][-][6]`.
 pub fn to_exact_exp_str<'a, T, F>(mut format_exact: F, v: T,
                                   sign: Sign, ndigits: usize, upper: bool,
                                   buf: &'a mut [u8], parts: &'a mut [Part<'a>]) -> Formatted<'a>
@@ -600,8 +600,8 @@ pub fn to_exact_exp_str<'a, T, F>(mut format_exact: F, v: T,
 /// The byte buffer should be enough for the output unless `frac_digits` is
 /// so large that only the fixed number of digits will be ever written.
 /// (The tipping point for `f64` is about 800, and 1000 bytes should be enough.)
-/// There should be at least 5 parts available, due to the worst case like
-/// `[+][0.][0000][45][0000]` with `frac_digits = 10`.
+/// There should be at least 4 parts available, due to the worst case like
+/// `[+][0.][0000][2][0000]` with `frac_digits = 10`.
 pub fn to_exact_fixed_str<'a, T, F>(mut format_exact: F, v: T,
                                     sign: Sign, frac_digits: usize, _upper: bool,
                                     buf: &'a mut [u8], parts: &'a mut [Part<'a>]) -> Formatted<'a>
index 013f02685bad117149951d86e56b9c3f662d6f71..6cc374b13b7b3042257f13190f47f0bf95bdbd08 100644 (file)
@@ -12,6 +12,7 @@
 
 use ops::*;
 
+#[allow(unused_macros)]
 macro_rules! sh_impl_signed {
     ($t:ident, $f:ident) => (
         #[stable(feature = "rust1", since = "1.0.0")]
index 6942f5fb67b60069143f5ed301cffb6e0fef0961..fc3af096b183880c4b3a61910e4fe5eae73e904d 100644 (file)
@@ -799,6 +799,7 @@ macro_rules! neg_impl_numeric {
     ($($t:ty)*) => { neg_impl_core!{ x => -x, $($t)*} }
 }
 
+#[allow(unused_macros)]
 macro_rules! neg_impl_unsigned {
     ($($t:ty)*) => {
         neg_impl_core!{ x => {
index ad71584b61a0fbf611965a1bc8af7758494f7073..09675564291a248b4ded93dddccee6aa6371a73f 100644 (file)
@@ -133,6 +133,14 @@ fn quote(&self) -> TokenStream {
     }
 }
 
+impl Quote for usize {
+    fn quote(&self) -> TokenStream {
+        let integer_symbol = Symbol::intern(&self.to_string());
+        TokenTree::Token(DUMMY_SP, Token::Literal(token::Lit::Integer(integer_symbol), None))
+            .into()
+    }
+}
+
 impl Quote for Ident {
     fn quote(&self) -> TokenStream {
         // FIXME(jseyfried) quote hygiene
@@ -193,15 +201,17 @@ macro_rules! gen_match {
 impl Quote for Lit {
     fn quote(&self) -> TokenStream {
         macro_rules! gen_match {
-            ($($i:ident),*) => {
+            ($($i:ident),*; $($raw:ident),*) => {
                 match *self {
                     $( Lit::$i(lit) => quote!(::syntax::parse::token::Lit::$i((quote lit))), )*
-                    _ => panic!("Unsupported literal"),
+                    $( Lit::$raw(lit, n) => {
+                        quote!(::syntax::parse::token::Lit::$raw((quote lit), (quote n)))
+                    })*
                 }
             }
         }
 
-        gen_match!(Byte, Char, Float, Str_, Integer, ByteStr)
+        gen_match!(Byte, Char, Float, Str_, Integer, ByteStr; StrRaw, ByteStrRaw)
     }
 }
 
index fa217acd9f9bf5e4eed1ea2feaede465f4450cbd..9d64f511914d6d3c2ec5b16a95a28886a2f3c8ba 100644 (file)
@@ -13,12 +13,12 @@ arena = { path = "../libarena" }
 fmt_macros = { path = "../libfmt_macros" }
 graphviz = { path = "../libgraphviz" }
 log = "0.3"
+owning_ref = "0.3.3"
 rustc_back = { path = "../librustc_back" }
 rustc_bitflags = { path = "../librustc_bitflags" }
 rustc_const_math = { path = "../librustc_const_math" }
 rustc_data_structures = { path = "../librustc_data_structures" }
 rustc_errors = { path = "../librustc_errors" }
-rustc_llvm = { path = "../librustc_llvm" }
 serialize = { path = "../libserialize" }
 syntax = { path = "../libsyntax" }
 syntax_pos = { path = "../libsyntax_pos" }
index 25fc5b7a4f6d9f9c9ff2e9b101205a3dad18a20e..15c4469b74694c98d3108c0002a9840329f6b53f 100644 (file)
@@ -106,6 +106,8 @@ pub enum DepNode<D: Clone + Debug> {
     UsedTraitImports(D),
     ConstEval(D),
     SymbolName(D),
+    SpecializationGraph(D),
+    ObjectSafety(D),
 
     // The set of impls for a given trait. Ultimately, it would be
     // nice to get more fine-grained here (e.g., to include a
@@ -116,6 +118,8 @@ pub enum DepNode<D: Clone + Debug> {
     // than changes in the impl body.
     TraitImpls(D),
 
+    AllLocalTraitImpls,
+
     // Nodes representing caches. To properly handle a true cache, we
     // don't use a DepTrackingMap, but rather we push a task node.
     // Otherwise the write into the map would be incorrectly
@@ -262,7 +266,10 @@ pub fn map_def<E, OP>(&self, mut op: OP) -> Option<DepNode<E>>
             UsedTraitImports(ref d) => op(d).map(UsedTraitImports),
             ConstEval(ref d) => op(d).map(ConstEval),
             SymbolName(ref d) => op(d).map(SymbolName),
+            SpecializationGraph(ref d) => op(d).map(SpecializationGraph),
+            ObjectSafety(ref d) => op(d).map(ObjectSafety),
             TraitImpls(ref d) => op(d).map(TraitImpls),
+            AllLocalTraitImpls => Some(AllLocalTraitImpls),
             TraitItems(ref d) => op(d).map(TraitItems),
             ReprHints(ref d) => op(d).map(ReprHints),
             TraitSelect { ref trait_def_id, ref input_def_id } => {
index 8ef42826faca5cc2d00043951c4a785f67c5f886..470dcb4bd61e10e431bf8e3392e3734c288081c7 100644 (file)
@@ -409,6 +409,67 @@ impl Quux for Foo { }
 [iss15872]: https://github.com/rust-lang/rust/issues/15872
 "##,
 
+E0119: r##"
+There are conflicting trait implementations for the same type.
+Example of erroneous code:
+
+```compile_fail,E0119
+trait MyTrait {
+    fn get(&self) -> usize;
+}
+
+impl<T> MyTrait for T {
+    fn get(&self) -> usize { 0 }
+}
+
+struct Foo {
+    value: usize
+}
+
+impl MyTrait for Foo { // error: conflicting implementations of trait
+                       //        `MyTrait` for type `Foo`
+    fn get(&self) -> usize { self.value }
+}
+```
+
+When looking for an implementation of the trait, the compiler finds
+both the blanket `impl<T> MyTrait for T`, which covers all types, and the `impl
+MyTrait for Foo`. Since a trait cannot be implemented multiple times for
+the same type, this is an error. So, when you write:
+
+```
+trait MyTrait {
+    fn get(&self) -> usize;
+}
+
+impl<T> MyTrait for T {
+    fn get(&self) -> usize { 0 }
+}
+```
+
+This implements the trait for all types in scope. If you then try to
+implement it for another type as well, the two implementations will
+conflict because the blanket implementation already covers it. Example:
+
+```
+trait MyTrait {
+    fn get(&self) -> usize;
+}
+
+impl<T> MyTrait for T {
+    fn get(&self) -> usize { 0 }
+}
+
+struct Foo;
+
+fn main() {
+    let f = Foo;
+
+    f.get(); // the trait is implemented so we can use it
+}
+```
+"##,
+
 E0133: r##"
 Unsafe code was used outside of an unsafe function or block.
 
index c715484a934df37afc1ed353c9cc0fd73cbff14b..868730edfedda83b1d4d10ef586ca3f37b75dc58 100644 (file)
@@ -497,7 +497,7 @@ pub fn ty_param_name(&self, id: NodeId) -> Name {
     }
 
     pub fn trait_impls(&self, trait_did: DefId) -> &'hir [NodeId] {
-        self.dep_graph.read(DepNode::TraitImpls(trait_did));
+        self.dep_graph.read(DepNode::AllLocalTraitImpls);
 
         // NB: intentionally bypass `self.forest.krate()` so that we
         // do not trigger a read of the whole krate here
@@ -505,7 +505,7 @@ pub fn trait_impls(&self, trait_did: DefId) -> &'hir [NodeId] {
     }
 
     pub fn trait_default_impl(&self, trait_did: DefId) -> Option<NodeId> {
-        self.dep_graph.read(DepNode::TraitImpls(trait_did));
+        self.dep_graph.read(DepNode::AllLocalTraitImpls);
 
         // NB: intentionally bypass `self.forest.krate()` so that we
         // do not trigger a read of the whole krate here
index e760f7efc93d912645454213f9b42b5707836261..ccdbab88b8b9c17e9e9916804d194c2d953392ae 100644 (file)
@@ -94,3 +94,11 @@ fn finish(mut hasher: stable_hasher::StableHasher<Self>) -> Self {
         fingerprint
     }
 }
+
+impl<CTX> stable_hasher::HashStable<CTX> for Fingerprint {
+    fn hash_stable<W: stable_hasher::StableHasherResult>(&self,
+                                          _: &mut CTX,
+                                          hasher: &mut stable_hasher::StableHasher<W>) {
+        ::std::hash::Hash::hash(&self.0, hasher);
+    }
+}
index 3a6367c353c1eec7cdcc32a756281ef81c8f528e..786d1c5035d96fa388278aa906c362e2c6467ffa 100644 (file)
@@ -16,7 +16,7 @@
 use util::nodemap::NodeMap;
 
 use std::hash as std_hash;
-use std::collections::{HashMap, HashSet};
+use std::collections::{HashMap, HashSet, BTreeMap};
 
 use syntax::ast;
 use syntax::attr;
@@ -348,3 +348,25 @@ pub fn hash_stable_nodemap<'a, 'tcx, V, W>(hcx: &mut StableHashingContext<'a, 't
         hcx.tcx.hir.definitions().node_to_hir_id(*node_id).local_id
     });
 }
+
+
+pub fn hash_stable_btreemap<'a, 'tcx, K, V, SK, F, W>(hcx: &mut StableHashingContext<'a, 'tcx>,
+                                                      hasher: &mut StableHasher<W>,
+                                                      map: &BTreeMap<K, V>,
+                                                      extract_stable_key: F)
+    where K: Eq + Ord,
+          V: HashStable<StableHashingContext<'a, 'tcx>>,
+          SK: HashStable<StableHashingContext<'a, 'tcx>> + Ord + Clone,
+          F: Fn(&mut StableHashingContext<'a, 'tcx>, &K) -> SK,
+          W: StableHasherResult,
+{
+    let mut keys: Vec<_> = map.keys()
+                              .map(|k| (extract_stable_key(hcx, k), k))
+                              .collect();
+    keys.sort_unstable_by_key(|&(ref stable_key, _)| stable_key.clone());
+    keys.len().hash_stable(hcx, hasher);
+    for (stable_key, key) in keys {
+        stable_key.hash_stable(hcx, hasher);
+        map[key].hash_stable(hcx, hasher);
+    }
+}
index d881a1cc45a79a56da6c1a420b278068e6264bfb..5b23809085053d09b2cf6a821a25140dc9315602 100644 (file)
@@ -13,7 +13,8 @@
 pub use self::fingerprint::Fingerprint;
 pub use self::caching_codemap_view::CachingCodemapView;
 pub use self::hcx::{StableHashingContext, NodeIdHashingMode, hash_stable_hashmap,
-                    hash_stable_hashset, hash_stable_nodemap};
+                    hash_stable_hashset, hash_stable_nodemap,
+                    hash_stable_btreemap};
 mod fingerprint;
 mod caching_codemap_view;
 mod hcx;
index 18909a784d6106298aec4d8ad3b7b5e3b5250bd2..aabb6aff55140c4138efecfa741db1245b2e0e4b 100644 (file)
 use super::InferCtxt;
 use super::{MiscVariable, TypeTrace};
 
+use hir::def_id::DefId;
 use ty::{IntType, UintType};
 use ty::{self, Ty, TyCtxt};
 use ty::error::TypeError;
 use ty::relate::{self, Relate, RelateResult, TypeRelation};
+use ty::subst::Substs;
 use traits::{Obligation, PredicateObligations};
 
 use syntax::ast;
@@ -336,6 +338,23 @@ fn binders<T>(&mut self, a: &ty::Binder<T>, b: &ty::Binder<T>)
         Ok(ty::Binder(self.relate(a.skip_binder(), b.skip_binder())?))
     }
 
+    fn relate_item_substs(&mut self,
+                          item_def_id: DefId,
+                          a_subst: &'tcx Substs<'tcx>,
+                          b_subst: &'tcx Substs<'tcx>)
+                          -> RelateResult<'tcx, &'tcx Substs<'tcx>>
+    {
+        if self.ambient_variance == ty::Variance::Invariant {
+            // Avoid fetching the variance if we are in an invariant
+            // context; no need, and it can induce dependency cycles
+            // (e.g. #41849).
+            relate::relate_substs(self, None, a_subst, b_subst)
+        } else {
+            let opt_variances = self.tcx().variances_of(item_def_id);
+            relate::relate_substs(self, Some(&opt_variances), a_subst, b_subst)
+        }
+    }
+
     fn relate_with_variance<T: Relate<'tcx>>(&mut self,
                                              variance: ty::Variance,
                                              a: &T,
index 5cf26ea8bfca596db6575dc9ec4cc46066f1cc88..f32ee7900646b0e4f1c82e9b415362e85dbc533d 100644 (file)
@@ -54,7 +54,7 @@
 extern crate getopts;
 extern crate graphviz;
 extern crate libc;
-extern crate rustc_llvm as llvm;
+extern crate owning_ref;
 extern crate rustc_back;
 extern crate rustc_data_structures;
 extern crate serialize;
index e681d55cf94b893db2fd37f65eb7e5a422bc4dda..07140f71aebaa1b68253f71e3aef28372800d397 100644 (file)
     "detects unreachable patterns"
 }
 
+declare_lint! {
+    pub UNUSED_MACROS,
+    Warn,
+    "detects macros that were not used"
+}
+
 declare_lint! {
     pub WARNINGS,
     Warn,
@@ -259,6 +265,7 @@ fn get_lints(&self) -> LintArray {
             DEAD_CODE,
             UNREACHABLE_CODE,
             UNREACHABLE_PATTERNS,
+            UNUSED_MACROS,
             WARNINGS,
             UNUSED_FEATURES,
             STABLE_FEATURES,
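For context, a sketch of the kind of code the new lint is intended to catch, assuming only what the lint description above states (a macro that is defined but never invoked):

```rust
// The new `unused_macros` lint (warn by default) fires on this definition,
// because `never_used!` is not invoked anywhere in the crate.
macro_rules! never_used {
    () => {};
}

fn main() {}
```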
index 60e671f1772e382e87841dca3fe0a888b657a064..9d5ba2c8f950101547a810d5ea2a398f044249f0 100644 (file)
@@ -49,6 +49,7 @@
 use hir::def_id::LOCAL_CRATE;
 use hir::intravisit as hir_visit;
 use syntax::visit as ast_visit;
+use syntax::tokenstream::ThinTokenStream;
 
 /// Information about the registered lints.
 ///
@@ -1125,6 +1126,13 @@ fn visit_path_list_item(&mut self, prefix: &'a ast::Path, item: &'a ast::PathLis
     fn visit_attribute(&mut self, attr: &'a ast::Attribute) {
         run_lints!(self, check_attribute, early_passes, attr);
     }
+
+    fn visit_mac_def(&mut self, _mac: &'a ThinTokenStream, id: ast::NodeId) {
+        let lints = self.sess.lints.borrow_mut().take(id);
+        for early_lint in lints {
+            self.early_lint(&early_lint);
+        }
+    }
 }
 
 enum CheckLintNameResult {
index 569b1aeeb09d7ca1a2e2323746295a89b24708c7..a68aca4600054ac2ba3e9d87b7df727ad2d0f09b 100644 (file)
@@ -36,8 +36,9 @@
 use util::nodemap::{NodeSet, DefIdMap};
 
 use std::any::Any;
-use std::path::PathBuf;
+use std::path::{Path, PathBuf};
 use std::rc::Rc;
+use owning_ref::ErasedBoxRef;
 use syntax::ast;
 use syntax::ext::base::SyntaxExtension;
 use syntax::symbol::Symbol;
@@ -201,11 +202,33 @@ pub fn new() -> EncodedMetadataHashes {
     }
 }
 
+/// The backend's way to give the crate store access to the metadata in a library.
+/// Note that it returns the raw metadata bytes stored in the library file, whether
+/// it is compressed, uncompressed, some weird mix, etc.
+/// rmeta files are backend independent and not handled here.
+///
+/// At the time of this writing, there is only one backend and one way to store
+/// metadata in a library -- this trait just serves to decouple rustc_metadata from
+/// the archive reader, which depends on LLVM.
+pub trait MetadataLoader {
+    fn get_rlib_metadata(&self,
+                         target: &Target,
+                         filename: &Path)
+                         -> Result<ErasedBoxRef<[u8]>, String>;
+    fn get_dylib_metadata(&self,
+                          target: &Target,
+                          filename: &Path)
+                          -> Result<ErasedBoxRef<[u8]>, String>;
+}
+
 /// A store of Rust crates, through which their metadata
 /// can be accessed.
 pub trait CrateStore {
     fn crate_data_as_rc_any(&self, krate: CrateNum) -> Rc<Any>;
 
+    // access to the metadata loader
+    fn metadata_loader(&self) -> &MetadataLoader;
+
     // item info
     fn visibility(&self, def: DefId) -> ty::Visibility;
     fn visible_parent_map<'a>(&'a self) -> ::std::cell::Ref<'a, DefIdMap<DefId>>;
@@ -275,8 +298,6 @@ fn item_body<'a, 'tcx>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId)
     fn used_link_args(&self) -> Vec<String>;
 
     // utility functions
-    fn metadata_filename(&self) -> &str;
-    fn metadata_section_name(&self, target: &Target) -> &str;
     fn used_crates(&self, prefer: LinkagePreference) -> Vec<(CrateNum, LibSource)>;
     fn used_crate_source(&self, cnum: CrateNum) -> CrateSource;
     fn extern_mod_stmt_cnum(&self, emod_id: ast::NodeId) -> Option<CrateNum>;
@@ -413,8 +434,6 @@ fn used_libraries(&self) -> Vec<NativeLibrary> { vec![] }
     fn used_link_args(&self) -> Vec<String> { vec![] }
 
     // utility functions
-    fn metadata_filename(&self) -> &str { bug!("metadata_filename") }
-    fn metadata_section_name(&self, target: &Target) -> &str { bug!("metadata_section_name") }
     fn used_crates(&self, prefer: LinkagePreference) -> Vec<(CrateNum, LibSource)>
         { vec![] }
     fn used_crate_source(&self, cnum: CrateNum) -> CrateSource { bug!("used_crate_source") }
@@ -427,6 +446,9 @@ fn encode_metadata<'a, 'tcx>(&self,
         bug!("encode_metadata")
     }
     fn metadata_encoding_version(&self) -> &[u8] { bug!("metadata_encoding_version") }
+
+    // access to the metadata loader
+    fn metadata_loader(&self) -> &MetadataLoader { bug!("metadata_loader") }
 }
 
 pub trait CrateLoader {
index 884a71f0d32d4b24c983cb9049ca4ba819a9f26d..7cb5f2510d5c7acdc363048165bca73f44d2d85d 100644 (file)
@@ -328,7 +328,7 @@ pub struct Options {
     }
 );
 
-#[derive(Clone, PartialEq, Eq)]
+#[derive(Copy, Clone, PartialEq, Eq, Debug)]
 pub enum PrintRequest {
     FileNames,
     Sysroot,
@@ -824,9 +824,9 @@ fn parse_optimization_fuel(slot: &mut Option<(String, u64)>, v: Option<&str>) ->
     linker: Option<String> = (None, parse_opt_string, [UNTRACKED],
         "system linker to link outputs with"),
     link_arg: Vec<String> = (vec![], parse_string_push, [UNTRACKED],
-        "a single extra argument to pass to the linker (can be used several times)"),
+        "a single extra argument to append to the linker invocation (can be used several times)"),
     link_args: Option<Vec<String>> = (None, parse_opt_list, [UNTRACKED],
-        "extra arguments to pass to the linker (space separated)"),
+        "extra arguments to append to the linker invocation (space separated)"),
     link_dead_code: bool = (false, parse_bool, [UNTRACKED],
         "don't let linker strip dead code (turning it on can be used for code coverage)"),
     lto: bool = (false, parse_bool, [TRACKED],
@@ -963,7 +963,7 @@ fn parse_optimization_fuel(slot: &mut Option<(String, u64)>, v: Option<&str>) ->
           "attempt to recover from parse errors (experimental)"),
     incremental: Option<String> = (None, parse_opt_string, [UNTRACKED],
           "enable incremental compilation (experimental)"),
-    incremental_cc: bool = (false, parse_bool, [UNTRACKED],
+    incremental_cc: bool = (true, parse_bool, [UNTRACKED],
           "enable cross-crate incremental compilation (even more experimental)"),
     incremental_info: bool = (false, parse_bool, [UNTRACKED],
         "print high-level information about incremental reuse (or the lack thereof)"),
@@ -1029,6 +1029,10 @@ fn parse_optimization_fuel(slot: &mut Option<(String, u64)>, v: Option<&str>) ->
         "add a mapping target to the file path remapping config"),
     force_unstable_if_unmarked: bool = (false, parse_bool, [TRACKED],
         "force all crates to be `rustc_private` unstable"),
+    pre_link_arg: Vec<String> = (vec![], parse_string_push, [UNTRACKED],
+        "a single extra argument to prepend the linker invocation (can be used several times)"),
+    pre_link_args: Option<Vec<String>> = (None, parse_opt_list, [UNTRACKED],
+        "extra arguments to prepend to the linker invocation (space separated)"),
 }
 
 pub fn default_lib_output() -> CrateType {
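The new `pre_link_arg`/`pre_link_args` options above mirror the existing `link_arg`/`link_args` pair but prepend rather than append. A minimal sketch, under the assumption that prepended and appended arguments simply bracket the compiler-generated ones; the function and argument names here are hypothetical, not rustc's actual linker code:

```rust
// Hedged sketch (not rustc's linker code): pre-link args come first,
// user link args last, with the compiler-generated arguments in between.
fn build_linker_args(
    pre_link_args: &[String],  // e.g. from the new pre_link_arg(s) options
    generated_args: &[String], // object files, -L paths, libraries, ...
    link_args: &[String],      // e.g. from link_arg / link_args
) -> Vec<String> {
    let mut all = Vec::with_capacity(
        pre_link_args.len() + generated_args.len() + link_args.len(),
    );
    all.extend_from_slice(pre_link_args);
    all.extend_from_slice(generated_args);
    all.extend_from_slice(link_args);
    all
}

fn main() {
    let args = build_linker_args(
        &["--as-needed".to_string()],
        &["main.o".to_string(), "-lfoo".to_string()],
        &["-Wl,--gc-sections".to_string()],
    );
    assert_eq!(args.first().map(String::as_str), Some("--as-needed"));
    println!("{:?}", args);
}
```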
index 2e2d5a6bd4d387e8f2cf32208270cd2f63b24876..814246330a4c2f7b8da5e640ebbb536c83910423 100644 (file)
 use rustc_back::{LinkerFlavor, PanicStrategy};
 use rustc_back::target::Target;
 use rustc_data_structures::flock;
-use llvm;
 
 use std::path::{Path, PathBuf};
 use std::cell::{self, Cell, RefCell};
 use std::collections::HashMap;
 use std::env;
-use std::ffi::CString;
 use std::io::Write;
 use std::rc::Rc;
 use std::fmt;
 use std::time::Duration;
 use std::sync::Arc;
-use libc::c_int;
 
 mod code_stats;
 pub mod config;
@@ -713,8 +710,6 @@ pub fn build_session_(sopts: config::Options,
         out_of_fuel: Cell::new(false),
     };
 
-    init_llvm(&sess);
-
     sess
 }
 
@@ -743,55 +738,6 @@ pub enum IncrCompSession {
     }
 }
 
-fn init_llvm(sess: &Session) {
-    unsafe {
-        // Before we touch LLVM, make sure that multithreading is enabled.
-        use std::sync::Once;
-        static INIT: Once = Once::new();
-        static mut POISONED: bool = false;
-        INIT.call_once(|| {
-            if llvm::LLVMStartMultithreaded() != 1 {
-                // use an extra bool to make sure that all future usage of LLVM
-                // cannot proceed despite the Once not running more than once.
-                POISONED = true;
-            }
-
-            configure_llvm(sess);
-        });
-
-        if POISONED {
-            bug!("couldn't enable multi-threaded LLVM");
-        }
-    }
-}
-
-unsafe fn configure_llvm(sess: &Session) {
-    let mut llvm_c_strs = Vec::new();
-    let mut llvm_args = Vec::new();
-
-    {
-        let mut add = |arg: &str| {
-            let s = CString::new(arg).unwrap();
-            llvm_args.push(s.as_ptr());
-            llvm_c_strs.push(s);
-        };
-        add("rustc"); // fake program name
-        if sess.time_llvm_passes() { add("-time-passes"); }
-        if sess.print_llvm_passes() { add("-debug-pass=Structure"); }
-
-        for arg in &sess.opts.cg.llvm_args {
-            add(&(*arg));
-        }
-    }
-
-    llvm::LLVMInitializePasses();
-
-    llvm::initialize_available_targets();
-
-    llvm::LLVMRustSetLLVMOptions(llvm_args.len() as c_int,
-                                 llvm_args.as_ptr());
-}
-
 pub fn early_error(output: config::ErrorOutputType, msg: &str) -> ! {
     let emitter: Box<Emitter> = match output {
         config::ErrorOutputType::HumanReadable(color_config) => {
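The removed `init_llvm`/`configure_llvm` pair (which, judging by the later `rustc_trans::init(&sess)` calls in this patch, moves behind `rustc_trans`) used the classic `Once` plus poison-flag idiom for one-time global initialization. A self-contained sketch of that idiom, with a hypothetical `try_enable_backend` standing in for the real LLVM calls:

```rust
// Hedged sketch of the one-time-initialization pattern: the Once runs the
// closure exactly once, and a separate flag records whether that single run
// failed so every later caller can still observe the failure.
use std::sync::Once;
use std::sync::atomic::{AtomicBool, Ordering};

static INIT: Once = Once::new();
static POISONED: AtomicBool = AtomicBool::new(false);

fn try_enable_backend() -> bool {
    // Placeholder for e.g. enabling multithreading in a native backend.
    true
}

fn init_backend() {
    INIT.call_once(|| {
        if !try_enable_backend() {
            // Record the failure; the Once will never run again.
            POISONED.store(true, Ordering::SeqCst);
        }
    });
    if POISONED.load(Ordering::SeqCst) {
        panic!("couldn't enable the multi-threaded backend");
    }
}

fn main() {
    init_backend();
    init_backend(); // second call is a no-op apart from the poison check
}
```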
index 2f525e1b8b45c5afcba01f3f6fc720c09ecd81e5..1823373348badfb8ef177602d852f4ed29ff0217 100644 (file)
@@ -619,8 +619,6 @@ pub fn get_vtable_methods<'a, 'tcx>(
     debug!("get_vtable_methods({:?})", trait_ref);
 
     supertraits(tcx, trait_ref).flat_map(move |trait_ref| {
-        tcx.populate_implementations_for_trait_if_necessary(trait_ref.def_id());
-
         let trait_methods = tcx.associated_items(trait_ref.def_id())
             .filter(|item| item.kind == ty::AssociatedKind::Method);
 
@@ -782,3 +780,19 @@ fn self_ty(&self) -> ty::Binder<Ty<'tcx>> {
         ty::Binder(self.predicate.skip_binder().self_ty())
     }
 }
+
+pub fn provide(providers: &mut ty::maps::Providers) {
+    *providers = ty::maps::Providers {
+        is_object_safe: object_safety::is_object_safe_provider,
+        specialization_graph_of: specialize::specialization_graph_provider,
+        ..*providers
+    };
+}
+
+pub fn provide_extern(providers: &mut ty::maps::Providers) {
+    *providers = ty::maps::Providers {
+        is_object_safe: object_safety::is_object_safe_provider,
+        specialization_graph_of: specialize::specialization_graph_provider,
+        ..*providers
+    };
+}
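Both new `provide` functions register query providers by overriding a couple of fields and keeping the rest via struct-update syntax. A toy sketch of that registration pattern, using a hypothetical `Providers` struct rather than `ty::maps::Providers`:

```rust
// Hedged sketch of the provider-registration pattern: each module overrides
// the fields it implements and keeps the rest with `..*providers`.
#[derive(Clone, Copy)]
struct Providers {
    is_object_safe: fn(u32) -> bool,
    specialization_graph_of: fn(u32) -> usize,
}

fn default_is_object_safe(_: u32) -> bool { panic!("no provider registered") }
fn default_graph(_: u32) -> usize { panic!("no provider registered") }

fn my_is_object_safe(_trait_id: u32) -> bool { true }

fn provide(providers: &mut Providers) {
    // Override one field, keep the others as they were.
    *providers = Providers {
        is_object_safe: my_is_object_safe,
        ..*providers
    };
}

fn main() {
    let mut providers = Providers {
        is_object_safe: default_is_object_safe,
        specialization_graph_of: default_graph,
    };
    provide(&mut providers);
    assert!((providers.is_object_safe)(0));
}
```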
index 66e8e503be40ba462f79fccab07a76268bf8ccba..0e3a53129d157fe2af455cacd25a59597c87b22f 100644 (file)
@@ -77,25 +77,6 @@ pub enum MethodViolationCode {
 }
 
 impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
-    pub fn is_object_safe(self, trait_def_id: DefId) -> bool {
-        // Because we query yes/no results frequently, we keep a cache:
-        let def = self.trait_def(trait_def_id);
-
-        let result = def.object_safety().unwrap_or_else(|| {
-            let result = self.object_safety_violations(trait_def_id).is_empty();
-
-            // Record just a yes/no result in the cache; this is what is
-            // queried most frequently. Note that this may overwrite a
-            // previous result, but always with the same thing.
-            def.set_object_safety(result);
-
-            result
-        });
-
-        debug!("is_object_safe({:?}) = {}", trait_def_id, result);
-
-        result
-    }
 
     /// Returns the object safety violations that affect
     /// astconv - currently, Self in supertraits. This is needed
@@ -391,3 +372,9 @@ fn contains_illegal_self_type_reference(self,
         error
     }
 }
+
+pub(super) fn is_object_safe_provider<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+                                         trait_def_id: DefId)
+                                         -> bool {
+    tcx.object_safety_violations(trait_def_id).is_empty()
+}
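The deleted method cached the yes/no answer on `TraitDef` by hand; the new `is_object_safe_provider` just computes the answer and leaves caching to the query engine. A toy sketch of the memoization pattern being retired, using a plain `HashMap` cache rather than the rustc query machinery:

```rust
// Hedged sketch: a hand-rolled cache around a pure yes/no computation,
// the role the query system now plays for is_object_safe.
use std::cell::RefCell;
use std::collections::HashMap;

struct Memo<F: Fn(u32) -> bool> {
    compute: F,
    cache: RefCell<HashMap<u32, bool>>,
}

impl<F: Fn(u32) -> bool> Memo<F> {
    fn new(compute: F) -> Self {
        Memo { compute, cache: RefCell::new(HashMap::new()) }
    }

    fn get(&self, key: u32) -> bool {
        if let Some(&cached) = self.cache.borrow().get(&key) {
            return cached;
        }
        let value = (self.compute)(key);
        self.cache.borrow_mut().insert(key, value);
        value
    }
}

fn main() {
    // Pretend `key % 2 == 0` means "trait is object safe".
    let is_object_safe = Memo::new(|trait_id| trait_id % 2 == 0);
    assert!(is_object_safe.get(4));
    assert!(!is_object_safe.get(3)); // computed once, cached afterwards
}
```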
index e01f97eb1f3a0340dbe635139ed7985becf977b6..d7911870f391a541dfd36297c3437999350603b1 100644 (file)
@@ -900,96 +900,50 @@ fn assemble_candidates_from_impls<'cx, 'gcx, 'tcx>(
                 // In either case, we handle this by not adding a
                 // candidate for an impl if it contains a `default`
                 // type.
-                let opt_node_item = assoc_ty_def(selcx,
-                                                 impl_data.impl_def_id,
-                                                 obligation.predicate.item_name);
-                let new_candidate = if let Some(node_item) = opt_node_item {
-                    let is_default = if node_item.node.is_from_trait() {
-                        // If true, the impl inherited a `type Foo = Bar`
-                        // given in the trait, which is implicitly default.
-                        // Otherwise, the impl did not specify `type` and
-                        // neither did the trait:
-                        //
-                        // ```rust
-                        // trait Foo { type T; }
-                        // impl Foo for Bar { }
-                        // ```
-                        //
-                        // This is an error, but it will be
-                        // reported in `check_impl_items_against_trait`.
-                        // We accept it here but will flag it as
-                        // an error when we confirm the candidate
-                        // (which will ultimately lead to `normalize_to_error`
-                        // being invoked).
-                        node_item.item.defaultness.has_value()
-                    } else {
-                        node_item.item.defaultness.is_default() ||
-                        selcx.tcx().impl_is_default(node_item.node.def_id())
-                    };
-
-                    // Only reveal a specializable default if we're past type-checking
-                    // and the obligations is monomorphic, otherwise passes such as
-                    // transmute checking and polymorphic MIR optimizations could
-                    // get a result which isn't correct for all monomorphizations.
-                    if !is_default {
+                let node_item = assoc_ty_def(selcx,
+                                             impl_data.impl_def_id,
+                                             obligation.predicate.item_name);
+
+                let is_default = if node_item.node.is_from_trait() {
+                    // If true, the impl inherited a `type Foo = Bar`
+                    // given in the trait, which is implicitly default.
+                    // Otherwise, the impl did not specify `type` and
+                    // neither did the trait:
+                    //
+                    // ```rust
+                    // trait Foo { type T; }
+                    // impl Foo for Bar { }
+                    // ```
+                    //
+                    // This is an error, but it will be
+                    // reported in `check_impl_items_against_trait`.
+                    // We accept it here but will flag it as
+                    // an error when we confirm the candidate
+                    // (which will ultimately lead to `normalize_to_error`
+                    // being invoked).
+                    node_item.item.defaultness.has_value()
+                } else {
+                    node_item.item.defaultness.is_default() ||
+                    selcx.tcx().impl_is_default(node_item.node.def_id())
+                };
+
+                // Only reveal a specializable default if we're past type-checking
+                // and the obligation is monomorphic, otherwise passes such as
+                // transmute checking and polymorphic MIR optimizations could
+                // get a result which isn't correct for all monomorphizations.
+                let new_candidate = if !is_default {
+                    Some(ProjectionTyCandidate::Select)
+                } else if selcx.projection_mode() == Reveal::All {
+                    assert!(!poly_trait_ref.needs_infer());
+                    if !poly_trait_ref.needs_subst() {
                         Some(ProjectionTyCandidate::Select)
-                    } else if selcx.projection_mode() == Reveal::All {
-                        assert!(!poly_trait_ref.needs_infer());
-                        if !poly_trait_ref.needs_subst() {
-                            Some(ProjectionTyCandidate::Select)
-                        } else {
-                            None
-                        }
                     } else {
                         None
                     }
                 } else {
-                    // This is saying that neither the trait nor
-                    // the impl contain a definition for this
-                    // associated type.  Normally this situation
-                    // could only arise through a compiler bug --
-                    // if the user wrote a bad item name, it
-                    // should have failed in astconv. **However**,
-                    // at coherence-checking time, we only look at
-                    // the topmost impl (we don't even consider
-                    // the trait itself) for the definition -- and
-                    // so in that case it may be that the trait
-                    // *DOES* have a declaration, but we don't see
-                    // it, and we end up in this branch.
-                    //
-                    // This is kind of tricky to handle actually.
-                    // For now, we just unconditionally ICE,
-                    // because otherwise, examples like the
-                    // following will succeed:
-                    //
-                    // ```
-                    // trait Assoc {
-                    //     type Output;
-                    // }
-                    //
-                    // impl<T> Assoc for T {
-                    //     default type Output = bool;
-                    // }
-                    //
-                    // impl Assoc for u8 {}
-                    // impl Assoc for u16 {}
-                    //
-                    // trait Foo {}
-                    // impl Foo for <u8 as Assoc>::Output {}
-                    // impl Foo for <u16 as Assoc>::Output {}
-                    //     return None;
-                    // }
-                    // ```
-                    //
-                    // The essential problem here is that the
-                    // projection fails, leaving two unnormalized
-                    // types, which appear not to unify -- so the
-                    // overlap check succeeds, when it should
-                    // fail.
-                    span_bug!(obligation.cause.span,
-                              "Tried to project an inherited associated type during \
-                               coherence checking, which is currently not supported.");
+                    None
                 };
+
                 candidate_set.vec.extend(new_candidate);
             }
             super::VtableParam(..) => {
@@ -1274,35 +1228,25 @@ fn confirm_impl_candidate<'cx, 'gcx, 'tcx>(
     let VtableImplData { substs, nested, impl_def_id } = impl_vtable;
 
     let tcx = selcx.tcx();
-    let trait_ref = obligation.predicate.trait_ref;
     let assoc_ty = assoc_ty_def(selcx, impl_def_id, obligation.predicate.item_name);
 
-    match assoc_ty {
-        Some(node_item) => {
-            let ty = if !node_item.item.defaultness.has_value() {
-                // This means that the impl is missing a definition for the
-                // associated type. This error will be reported by the type
-                // checker method `check_impl_items_against_trait`, so here we
-                // just return TyError.
-                debug!("confirm_impl_candidate: no associated type {:?} for {:?}",
-                       node_item.item.name,
-                       obligation.predicate.trait_ref);
-                tcx.types.err
-            } else {
-                tcx.type_of(node_item.item.def_id)
-            };
-            let substs = translate_substs(selcx.infcx(), impl_def_id, substs, node_item.node);
-            Progress {
-                ty: ty.subst(tcx, substs),
-                obligations: nested,
-                cacheable: true
-            }
-        }
-        None => {
-            span_bug!(obligation.cause.span,
-                      "No associated type for {:?}",
-                      trait_ref);
-        }
+    let ty = if !assoc_ty.item.defaultness.has_value() {
+        // This means that the impl is missing a definition for the
+        // associated type. This error will be reported by the type
+        // checker method `check_impl_items_against_trait`, so here we
+        // just return TyError.
+        debug!("confirm_impl_candidate: no associated type {:?} for {:?}",
+               assoc_ty.item.name,
+               obligation.predicate.trait_ref);
+        tcx.types.err
+    } else {
+        tcx.type_of(assoc_ty.item.def_id)
+    };
+    let substs = translate_substs(selcx.infcx(), impl_def_id, substs, assoc_ty.node);
+    Progress {
+        ty: ty.subst(tcx, substs),
+        obligations: nested,
+        cacheable: true
     }
 }
 
@@ -1315,27 +1259,43 @@ fn assoc_ty_def<'cx, 'gcx, 'tcx>(
     selcx: &SelectionContext<'cx, 'gcx, 'tcx>,
     impl_def_id: DefId,
     assoc_ty_name: ast::Name)
-    -> Option<specialization_graph::NodeItem<ty::AssociatedItem>>
+    -> specialization_graph::NodeItem<ty::AssociatedItem>
 {
-    let trait_def_id = selcx.tcx().impl_trait_ref(impl_def_id).unwrap().def_id;
-    let trait_def = selcx.tcx().trait_def(trait_def_id);
-
-    if !trait_def.is_complete(selcx.tcx()) {
-        let impl_node = specialization_graph::Node::Impl(impl_def_id);
-        for item in impl_node.items(selcx.tcx()) {
-            if item.kind == ty::AssociatedKind::Type && item.name == assoc_ty_name {
-                return Some(specialization_graph::NodeItem {
-                    node: specialization_graph::Node::Impl(impl_def_id),
-                    item: item,
-                });
-            }
+    let tcx = selcx.tcx();
+    let trait_def_id = tcx.impl_trait_ref(impl_def_id).unwrap().def_id;
+    let trait_def = tcx.trait_def(trait_def_id);
+
+    // This function may be called while we are still building the
+    // specialization graph that is queried below (via TraitDef::ancestors()),
+    // so, in order to avoid unnecessary infinite recursion, we manually look
+    // for the associated item at the given impl.
+    // If there is no such item in that impl, this function will fail with a
+    // cycle error if the specialization graph is currently being built.
+    let impl_node = specialization_graph::Node::Impl(impl_def_id);
+    for item in impl_node.items(tcx) {
+        if item.kind == ty::AssociatedKind::Type && item.name == assoc_ty_name {
+            return specialization_graph::NodeItem {
+                node: specialization_graph::Node::Impl(impl_def_id),
+                item: item,
+            };
         }
-        None
+    }
+
+    if let Some(assoc_item) = trait_def
+        .ancestors(tcx, impl_def_id)
+        .defs(tcx, assoc_ty_name, ty::AssociatedKind::Type)
+        .next() {
+        assoc_item
     } else {
-        trait_def
-            .ancestors(impl_def_id)
-            .defs(selcx.tcx(), assoc_ty_name, ty::AssociatedKind::Type)
-            .next()
+        // This is saying that neither the trait nor
+        // the impl contain a definition for this
+        // associated type.  Normally this situation
+        // could only arise through a compiler bug --
+        // if the user wrote a bad item name, it
+        // should have failed in astconv.
+        bug!("No associated type `{}` for {}",
+             assoc_ty_name,
+             tcx.item_path_str(impl_def_id))
     }
 }
 
index 3882e218241646127edbc6be623c13062bc0699c..0e5779f9d17935b291fc1d72aec74fd879166c0f 100644 (file)
@@ -27,6 +27,7 @@
 use traits::{self, Reveal, ObligationCause};
 use ty::{self, TyCtxt, TypeFoldable};
 use syntax_pos::DUMMY_SP;
+use std::rc::Rc;
 
 pub mod specialization_graph;
 
@@ -118,7 +119,7 @@ pub fn find_associated_item<'a, 'tcx>(
     let trait_def_id = tcx.trait_id_of_impl(impl_data.impl_def_id).unwrap();
     let trait_def = tcx.trait_def(trait_def_id);
 
-    let ancestors = trait_def.ancestors(impl_data.impl_def_id);
+    let ancestors = trait_def.ancestors(tcx, impl_data.impl_def_id);
     match ancestors.defs(tcx, item.name, item.kind).next() {
         Some(node_item) => {
             let substs = tcx.infer_ctxt((), Reveal::All).enter(|infcx| {
@@ -285,3 +286,62 @@ pub fn insert(&mut self, a: DefId, b: DefId, result: bool) {
         self.map.insert((a, b), result);
     }
 }
+
+// Query provider for `specialization_graph_of`.
+pub(super) fn specialization_graph_provider<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+                                                      trait_id: DefId)
+                                                      -> Rc<specialization_graph::Graph> {
+    let mut sg = specialization_graph::Graph::new();
+
+    let mut trait_impls: Vec<DefId> = tcx.trait_impls_of(trait_id).iter().collect();
+
+    // The coherence checking implementation seems to rely on impls being
+    // iterated over (roughly) in definition order, so we are sorting by
+    // negated CrateNum (so remote definitions are visited first) and then
+    // by a flattened version of the DefIndex.
+    trait_impls.sort_unstable_by_key(|def_id| {
+        (-(def_id.krate.as_u32() as i64),
+         def_id.index.address_space().index(),
+         def_id.index.as_array_index())
+    });
+
+    for impl_def_id in trait_impls {
+        if impl_def_id.is_local() {
+            // This is where impl overlap checking happens:
+            let insert_result = sg.insert(tcx, impl_def_id);
+            // Report error if there was one.
+            if let Err(overlap) = insert_result {
+                let mut err = struct_span_err!(tcx.sess,
+                                               tcx.span_of_impl(impl_def_id).unwrap(),
+                                               E0119,
+                                               "conflicting implementations of trait `{}`{}:",
+                                               overlap.trait_desc,
+                                               overlap.self_desc.clone().map_or(String::new(),
+                                                                                |ty| {
+                    format!(" for type `{}`", ty)
+                }));
+
+                match tcx.span_of_impl(overlap.with_impl) {
+                    Ok(span) => {
+                        err.span_label(span, format!("first implementation here"));
+                        err.span_label(tcx.span_of_impl(impl_def_id).unwrap(),
+                                       format!("conflicting implementation{}",
+                                                overlap.self_desc
+                                                    .map_or(String::new(),
+                                                            |ty| format!(" for `{}`", ty))));
+                    }
+                    Err(cname) => {
+                        err.note(&format!("conflicting implementation in crate `{}`", cname));
+                    }
+                }
+
+                err.emit();
+            }
+        } else {
+            let parent = tcx.impl_parent(impl_def_id).unwrap_or(trait_id);
+            sg.record_impl_from_cstore(tcx, parent, impl_def_id)
+        }
+    }
+
+    Rc::new(sg)
+}
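The provider above sorts impls so that remote definitions are visited before local ones by negating the crate number in the sort key. A small sketch of that ordering trick, with a toy `FakeDefId` in place of `DefId`:

```rust
// Hedged sketch of the sort key: negating the crate number makes larger
// crate numbers sort first, so upstream (remote) impls are visited before
// local ones, then ties break on the index.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct FakeDefId {
    krate: u32, // 0 = local crate, higher = upstream crates
    index: u32,
}

fn main() {
    let mut impls = vec![
        FakeDefId { krate: 0, index: 7 },
        FakeDefId { krate: 2, index: 1 },
        FakeDefId { krate: 0, index: 3 },
        FakeDefId { krate: 1, index: 9 },
    ];

    impls.sort_unstable_by_key(|id| (-(id.krate as i64), id.index));

    assert_eq!(impls[0], FakeDefId { krate: 2, index: 1 });
    assert_eq!(impls.last().copied().unwrap().krate, 0);
    println!("{:?}", impls);
}
```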
index 6e2c16c82aeb42274fd30c21a4a058a66d607e00..87c98a0ef0ed6daf16839d7a96f712dd99bde1f4 100644 (file)
@@ -12,8 +12,9 @@
 
 use hir::def_id::DefId;
 use traits::{self, Reveal};
-use ty::{self, TyCtxt, TraitDef, TypeFoldable};
+use ty::{self, TyCtxt, TypeFoldable};
 use ty::fast_reject::{self, SimplifiedType};
+use std::rc::Rc;
 use syntax::ast::Name;
 use util::nodemap::{DefIdMap, FxHashMap};
 
@@ -301,18 +302,19 @@ pub fn def_id(&self) -> DefId {
     }
 }
 
-pub struct Ancestors<'a> {
-    trait_def: &'a TraitDef,
+pub struct Ancestors {
+    trait_def_id: DefId,
+    specialization_graph: Rc<Graph>,
     current_source: Option<Node>,
 }
 
-impl<'a> Iterator for Ancestors<'a> {
+impl Iterator for Ancestors {
     type Item = Node;
     fn next(&mut self) -> Option<Node> {
         let cur = self.current_source.take();
         if let Some(Node::Impl(cur_impl)) = cur {
-            let parent = self.trait_def.specialization_graph.borrow().parent(cur_impl);
-            if parent == self.trait_def.def_id {
+            let parent = self.specialization_graph.parent(cur_impl);
+            if parent == self.trait_def_id {
                 self.current_source = Some(Node::Trait(parent));
             } else {
                 self.current_source = Some(Node::Impl(parent));
@@ -336,7 +338,7 @@ pub fn map<U, F: FnOnce(T) -> U>(self, f: F) -> NodeItem<U> {
     }
 }
 
-impl<'a, 'gcx, 'tcx> Ancestors<'a> {
+impl<'a, 'gcx, 'tcx> Ancestors {
     /// Search the items from the given ancestors, returning each definition
     /// with the given name and the given kind.
     #[inline] // FIXME(#35870) Avoid closures being unexported due to impl Trait.
@@ -351,9 +353,14 @@ pub fn defs(self, tcx: TyCtxt<'a, 'gcx, 'tcx>, name: Name, kind: ty::AssociatedK
 
 /// Walk up the specialization ancestors of a given impl, starting with that
 /// impl itself.
-pub fn ancestors<'a>(trait_def: &'a TraitDef, start_from_impl: DefId) -> Ancestors<'a> {
+pub fn ancestors(tcx: TyCtxt,
+                 trait_def_id: DefId,
+                 start_from_impl: DefId)
+                 -> Ancestors {
+    let specialization_graph = tcx.specialization_graph_of(trait_def_id);
     Ancestors {
-        trait_def: trait_def,
+        trait_def_id,
+        specialization_graph,
         current_source: Some(Node::Impl(start_from_impl)),
     }
 }
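`Ancestors` now owns an `Rc` to the specialization graph and walks parent links until it reaches the trait itself. A self-contained sketch of the same walk over a toy parent map (a `HashMap`, not the real `specialization_graph::Graph`):

```rust
// Hedged sketch of the Ancestors walk: starting from an impl, repeatedly
// look up its parent until the trait itself is reached.
use std::collections::HashMap;

#[derive(Debug, PartialEq)]
enum Node {
    Impl(u32),
    Trait(u32),
}

struct Ancestors {
    trait_id: u32,
    parent: HashMap<u32, u32>, // impl -> less specialized impl, or the trait
    current: Option<Node>,
}

impl Iterator for Ancestors {
    type Item = Node;

    fn next(&mut self) -> Option<Node> {
        let cur = self.current.take();
        if let Some(Node::Impl(cur_impl)) = cur {
            let parent = self.parent[&cur_impl];
            self.current = Some(if parent == self.trait_id {
                Node::Trait(parent)
            } else {
                Node::Impl(parent)
            });
        }
        cur
    }
}

fn main() {
    // Trait 100; impl 2 specializes impl 1, which hangs off the trait.
    let parent: HashMap<u32, u32> = vec![(1, 100), (2, 1)].into_iter().collect();
    let walk = Ancestors { trait_id: 100, parent, current: Some(Node::Impl(2)) };
    let nodes: Vec<Node> = walk.collect();
    assert_eq!(nodes, vec![Node::Impl(2), Node::Impl(1), Node::Trait(100)]);
}
```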
index 1fd9e8f73756e830ca1d0f238107770f03464348..85462bd9b1273ca2a40946ace10c0355abcb9212 100644 (file)
 use mir;
 use mir::transform::{MirSuite, MirPassIndex};
 use session::CompileResult;
+use traits::specialization_graph;
 use ty::{self, CrateInherentImpls, Ty, TyCtxt};
 use ty::item_path;
 use ty::steal::Steal;
 use ty::subst::Substs;
+use ty::fast_reject::SimplifiedType;
 use util::nodemap::{DefIdSet, NodeSet};
 
 use rustc_data_structures::indexed_vec::IndexVec;
@@ -98,6 +100,15 @@ fn default_span(&self, tcx: TyCtxt) -> Span {
     }
 }
 
+impl Key for (DefId, SimplifiedType) {
+    fn map_crate(&self) -> CrateNum {
+        self.0.krate
+    }
+    fn default_span(&self, tcx: TyCtxt) -> Span {
+        self.0.default_span(tcx)
+    }
+}
+
 impl<'tcx> Key for (DefId, &'tcx Substs<'tcx>) {
     fn map_crate(&self) -> CrateNum {
         self.0.krate
@@ -391,6 +402,24 @@ fn describe(tcx: TyCtxt, def_id: DefId) -> String {
     }
 }
 
+impl<'tcx> QueryDescription for queries::trait_impls_of<'tcx> {
+    fn describe(tcx: TyCtxt, def_id: DefId) -> String {
+        format!("trait impls of `{}`", tcx.item_path_str(def_id))
+    }
+}
+
+impl<'tcx> QueryDescription for queries::relevant_trait_impls_for<'tcx> {
+    fn describe(tcx: TyCtxt, (def_id, ty): (DefId, SimplifiedType)) -> String {
+        format!("relevant impls for: `({}, {:?})`", tcx.item_path_str(def_id), ty)
+    }
+}
+
+impl<'tcx> QueryDescription for queries::is_object_safe<'tcx> {
+    fn describe(tcx: TyCtxt, def_id: DefId) -> String {
+        format!("determine object safety of trait `{}`", tcx.item_path_str(def_id))
+    }
+}
+
 macro_rules! define_maps {
     (<$tcx:tt>
      $($(#[$attr:meta])*
@@ -592,7 +621,7 @@ pub struct Maps<$tcx> {
      output: $output:tt) => {
         define_map_struct! {
             tcx: $tcx,
-            ready: ([pub] $attrs $name),
+            ready: ([] $attrs $name),
             input: ($($input)*),
             output: $output
         }
@@ -820,6 +849,13 @@ fn default() -> Self {
     [] item_body_nested_bodies: ItemBodyNestedBodies(DefId) -> Rc<BTreeMap<hir::BodyId, hir::Body>>,
     [] const_is_rvalue_promotable_to_static: ConstIsRvaluePromotableToStatic(DefId) -> bool,
     [] is_mir_available: IsMirAvailable(DefId) -> bool,
+
+    [] trait_impls_of: TraitImpls(DefId) -> ty::trait_def::TraitImpls,
+    // Note that TraitDef::for_each_relevant_impl() will do type simplification for you.
+    [] relevant_trait_impls_for: relevant_trait_impls_for((DefId, SimplifiedType))
+        -> ty::trait_def::TraitImpls,
+    [] specialization_graph_of: SpecializationGraph(DefId) -> Rc<specialization_graph::Graph>,
+    [] is_object_safe: ObjectSafety(DefId) -> bool,
 }
 
 fn coherent_trait_dep_node((_, def_id): (CrateNum, DefId)) -> DepNode<DefId> {
@@ -859,3 +895,7 @@ fn mir_keys(_: CrateNum) -> DepNode<DefId> {
 fn crate_variances(_: CrateNum) -> DepNode<DefId> {
     DepNode::CrateVariances
 }
+
+fn relevant_trait_impls_for((def_id, _): (DefId, SimplifiedType)) -> DepNode<DefId> {
+    DepNode::TraitImpls(def_id)
+}
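The mapping just above deliberately ignores the `SimplifiedType` half of the key, presumably so that the filtered `relevant_trait_impls_for` results share a dependency node with `trait_impls_of`: coarser tracking, but no risk of missing an invalidation. A toy sketch of that idea with stand-in types:

```rust
// Hedged sketch: both queries map onto the same dependency node, so any
// change to a trait's full impl set also invalidates every cached filtered
// list. The types here are toys, not rustc's DepNode/DefId.
#[derive(Debug, PartialEq)]
enum DepNode {
    TraitImpls(u32),
}

fn trait_impls_of_dep_node(trait_id: u32) -> DepNode {
    DepNode::TraitImpls(trait_id)
}

fn relevant_trait_impls_for_dep_node((trait_id, _simplified_ty): (u32, &str)) -> DepNode {
    // The simplified self type is ignored on purpose.
    DepNode::TraitImpls(trait_id)
}

fn main() {
    assert_eq!(
        trait_impls_of_dep_node(7),
        relevant_trait_impls_for_dep_node((7, "Vec"))
    );
}
```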
index 6ca401d27ac72c13937b1795e4b90e09603fa259..a86d7351ef472be7116ff228da20fcfc47d54c53 100644 (file)
@@ -80,7 +80,7 @@
 
 pub use self::instance::{Instance, InstanceDef};
 
-pub use self::trait_def::{TraitDef, TraitFlags};
+pub use self::trait_def::TraitDef;
 
 pub use self::maps::queries;
 
@@ -2324,37 +2324,7 @@ pub fn has_attr(self, did: DefId, attr: &str) -> bool {
     }
 
     pub fn trait_has_default_impl(self, trait_def_id: DefId) -> bool {
-        let def = self.trait_def(trait_def_id);
-        def.flags.get().intersects(TraitFlags::HAS_DEFAULT_IMPL)
-    }
-
-    /// Populates the type context with all the implementations for the given
-    /// trait if necessary.
-    pub fn populate_implementations_for_trait_if_necessary(self, trait_id: DefId) {
-        if trait_id.is_local() {
-            return
-        }
-
-        // The type is not local, hence we are reading this out of
-        // metadata and don't need to track edges.
-        let _ignore = self.dep_graph.in_ignore();
-
-        let def = self.trait_def(trait_id);
-        if def.flags.get().intersects(TraitFlags::HAS_REMOTE_IMPLS) {
-            return;
-        }
-
-        debug!("populate_implementations_for_trait_if_necessary: searching for {:?}", def);
-
-        for impl_def_id in self.sess.cstore.implementations_of_trait(Some(trait_id)) {
-            let trait_ref = self.impl_trait_ref(impl_def_id).unwrap();
-
-            // Record the trait->implementation mapping.
-            let parent = self.impl_parent(impl_def_id).unwrap_or(trait_id);
-            def.record_remote_impl(self, impl_def_id, trait_ref, parent);
-        }
-
-        def.flags.set(def.flags.get() | TraitFlags::HAS_REMOTE_IMPLS);
+        self.trait_def(trait_def_id).has_default_impl
     }
 
     /// Given the def_id of an impl, return the def_id of the trait it implements.
@@ -2603,6 +2573,8 @@ pub fn provide(providers: &mut ty::maps::Providers) {
         adt_dtorck_constraint,
         def_span,
         trait_of_item,
+        trait_impls_of: trait_def::trait_impls_of_provider,
+        relevant_trait_impls_for: trait_def::relevant_trait_impls_provider,
         ..*providers
     };
 }
@@ -2611,6 +2583,8 @@ pub fn provide_extern(providers: &mut ty::maps::Providers) {
     *providers = ty::maps::Providers {
         adt_sized_constraint,
         adt_dtorck_constraint,
+        trait_impls_of: trait_def::trait_impls_of_provider,
+        relevant_trait_impls_for: trait_def::relevant_trait_impls_provider,
         ..*providers
     };
 }
index 097b596c5ebb6b6c9e08219168643855605b265a..865297c7ecbfd5fa67ec69dac06e3b609133e9df 100644 (file)
@@ -8,18 +8,13 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use dep_graph::DepNode;
-use hir::def_id::{DefId, LOCAL_CRATE};
-use traits::{self, specialization_graph};
-use ty;
+use hir::def_id::DefId;
+use traits::specialization_graph;
 use ty::fast_reject;
-use ty::{Ty, TyCtxt, TraitRef};
-use std::cell::{Cell, RefCell};
+use ty::fold::TypeFoldable;
+use ty::{Ty, TyCtxt};
+use std::rc::Rc;
 use hir;
-use util::nodemap::FxHashMap;
-
-use syntax::ast;
-use syntax_pos::DUMMY_SP;
 
 /// A trait's definition with type information.
 pub struct TraitDef {
@@ -33,237 +28,93 @@ pub struct TraitDef {
     /// be usable with the sugar (or without it).
     pub paren_sugar: bool,
 
-    // Impls of a trait. To allow for quicker lookup, the impls are indexed by a
-    // simplified version of their `Self` type: impls with a simplifiable `Self`
-    // are stored in `nonblanket_impls` keyed by it, while all other impls are
-    // stored in `blanket_impls`.
-    //
-    // A similar division is used within `specialization_graph`, but the ones
-    // here are (1) stored as a flat list for the trait and (2) populated prior
-    // to -- and used while -- determining specialization order.
-    //
-    // FIXME: solve the reentrancy issues and remove these lists in favor of the
-    // ones in `specialization_graph`.
-    //
-    // These lists are tracked by `DepNode::TraitImpls`; we don't use
-    // a DepTrackingMap but instead have the `TraitDef` insert the
-    // required reads/writes.
-
-    /// Impls of the trait.
-    nonblanket_impls: RefCell<
-        FxHashMap<fast_reject::SimplifiedType, Vec<DefId>>
-    >,
-
-    /// Blanket impls associated with the trait.
-    blanket_impls: RefCell<Vec<DefId>>,
-
-    /// The specialization order for impls of this trait.
-    pub specialization_graph: RefCell<traits::specialization_graph::Graph>,
-
-    /// Various flags
-    pub flags: Cell<TraitFlags>,
-
-    /// The number of impls we've added from the local crate.
-    /// When this number matches up the list in the HIR map,
-    /// we're done, and the specialization graph is correct.
-    local_impl_count: Cell<usize>,
+    pub has_default_impl: bool,
 
     /// The ICH of this trait's DefPath, cached here so it doesn't have to be
     /// recomputed all the time.
     pub def_path_hash: u64,
 }
 
-impl<'a, 'gcx, 'tcx> TraitDef {
-    pub fn new(def_id: DefId,
-               unsafety: hir::Unsafety,
-               paren_sugar: bool,
-               def_path_hash: u64)
-               -> TraitDef {
-        TraitDef {
-            def_id: def_id,
-            paren_sugar: paren_sugar,
-            unsafety: unsafety,
-            nonblanket_impls: RefCell::new(FxHashMap()),
-            blanket_impls: RefCell::new(vec![]),
-            flags: Cell::new(ty::TraitFlags::NO_TRAIT_FLAGS),
-            local_impl_count: Cell::new(0),
-            specialization_graph: RefCell::new(traits::specialization_graph::Graph::new()),
-            def_path_hash: def_path_hash,
-        }
-    }
+// We don't store the impls in one flat list because then each cached list
+// for `relevant_impls_for` would duplicate all the blanket impls. By keeping
+// blanket and non-blanket impls separate, we can share the list of blanket
+// impls.
+#[derive(Clone)]
+pub struct TraitImpls {
+    blanket_impls: Rc<Vec<DefId>>,
+    non_blanket_impls: Rc<Vec<DefId>>,
+}
 
-    // returns None if not yet calculated
-    pub fn object_safety(&self) -> Option<bool> {
-        if self.flags.get().intersects(TraitFlags::OBJECT_SAFETY_VALID) {
-            Some(self.flags.get().intersects(TraitFlags::IS_OBJECT_SAFE))
-        } else {
-            None
+impl TraitImpls {
+    pub fn iter(&self) -> TraitImplsIter {
+        TraitImplsIter {
+            blanket_impls: self.blanket_impls.clone(),
+            non_blanket_impls: self.non_blanket_impls.clone(),
+            index: 0
         }
     }
+}
 
-    pub fn set_object_safety(&self, is_safe: bool) {
-        assert!(self.object_safety().map(|cs| cs == is_safe).unwrap_or(true));
-        self.flags.set(
-            self.flags.get() | if is_safe {
-                TraitFlags::OBJECT_SAFETY_VALID | TraitFlags::IS_OBJECT_SAFE
-            } else {
-                TraitFlags::OBJECT_SAFETY_VALID
-            }
-        );
-    }
-
-    fn write_trait_impls(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) {
-        tcx.dep_graph.write(DepNode::TraitImpls(self.def_id));
-    }
-
-    fn read_trait_impls(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) {
-        tcx.dep_graph.read(DepNode::TraitImpls(self.def_id));
-    }
-
-    /// Records a basic trait-to-implementation mapping.
-    ///
-    /// Returns `true` iff the impl has not previously been recorded.
-    fn record_impl(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>,
-                   impl_def_id: DefId,
-                   impl_trait_ref: TraitRef<'tcx>)
-                   -> bool {
-        debug!("TraitDef::record_impl for {:?}, from {:?}",
-               self, impl_trait_ref);
+#[derive(Clone)]
+pub struct TraitImplsIter {
+    blanket_impls: Rc<Vec<DefId>>,
+    non_blanket_impls: Rc<Vec<DefId>>,
+    index: usize,
+}
 
-        // Record the write into the impl set, but only for local
-        // impls: external impls are handled differently.
-        if impl_def_id.is_local() {
-            self.write_trait_impls(tcx);
-        }
+impl Iterator for TraitImplsIter {
+    type Item = DefId;
 
-        // We don't want to borrow_mut after we already populated all impls,
-        // so check if an impl is present with an immutable borrow first.
-        if let Some(sty) = fast_reject::simplify_type(tcx,
-                                                      impl_trait_ref.self_ty(), false) {
-            if let Some(is) = self.nonblanket_impls.borrow().get(&sty) {
-                if is.contains(&impl_def_id) {
-                    return false; // duplicate - skip
-                }
-            }
-
-            self.nonblanket_impls.borrow_mut().entry(sty).or_insert(vec![]).push(impl_def_id)
+    fn next(&mut self) -> Option<DefId> {
+        if self.index < self.blanket_impls.len() {
+            let bi_index = self.index;
+            self.index += 1;
+            Some(self.blanket_impls[bi_index])
         } else {
-            if self.blanket_impls.borrow().contains(&impl_def_id) {
-                return false; // duplicate - skip
+            let nbi_index = self.index - self.blanket_impls.len();
+            if nbi_index < self.non_blanket_impls.len() {
+                self.index += 1;
+                Some(self.non_blanket_impls[nbi_index])
+            } else {
+                None
             }
-            self.blanket_impls.borrow_mut().push(impl_def_id)
         }
-
-        true
-    }
-
-    /// Records a trait-to-implementation mapping for a crate-local impl.
-    pub fn record_local_impl(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>,
-                             impl_def_id: DefId,
-                             impl_trait_ref: TraitRef<'tcx>) {
-        assert!(impl_def_id.is_local());
-        let was_new = self.record_impl(tcx, impl_def_id, impl_trait_ref);
-        assert!(was_new);
-
-        self.local_impl_count.set(self.local_impl_count.get() + 1);
     }
 
-    /// Records a trait-to-implementation mapping.
-    pub fn record_has_default_impl(&self) {
-        self.flags.set(self.flags.get() | TraitFlags::HAS_DEFAULT_IMPL);
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        let items_left = (self.blanket_impls.len() + self.non_blanket_impls.len()) - self.index;
+        (items_left, Some(items_left))
     }
+}
 
-    /// Records a trait-to-implementation mapping for a non-local impl.
-    ///
-    /// The `parent_impl` is the immediately-less-specialized impl, or the
-    /// trait's def ID if the impl is not a specialization -- information that
-    /// should be pulled from the metadata.
-    pub fn record_remote_impl(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>,
-                              impl_def_id: DefId,
-                              impl_trait_ref: TraitRef<'tcx>,
-                              parent_impl: DefId) {
-        assert!(!impl_def_id.is_local());
+impl ExactSizeIterator for TraitImplsIter {}
 
-        // if the impl has not previously been recorded
-        if self.record_impl(tcx, impl_def_id, impl_trait_ref) {
-            // if the impl is non-local, it's placed directly into the
-            // specialization graph using parent information drawn from metadata.
-            self.specialization_graph.borrow_mut()
-                .record_impl_from_cstore(tcx, parent_impl, impl_def_id)
+impl<'a, 'gcx, 'tcx> TraitDef {
+    pub fn new(def_id: DefId,
+               unsafety: hir::Unsafety,
+               paren_sugar: bool,
+               has_default_impl: bool,
+               def_path_hash: u64)
+               -> TraitDef {
+        TraitDef {
+            def_id,
+            paren_sugar,
+            unsafety,
+            has_default_impl,
+            def_path_hash,
         }
     }
 
-    /// Adds a local impl into the specialization graph, returning an error with
-    /// overlap information if the impl overlaps but does not specialize an
-    /// existing impl.
-    pub fn add_impl_for_specialization(&self,
-                                       tcx: TyCtxt<'a, 'gcx, 'tcx>,
-                                       impl_def_id: DefId)
-                                       -> Result<(), traits::OverlapError> {
-        assert!(impl_def_id.is_local());
-
-        self.specialization_graph.borrow_mut()
-            .insert(tcx, impl_def_id)
-    }
-
-    pub fn ancestors(&'a self, of_impl: DefId) -> specialization_graph::Ancestors<'a> {
-        specialization_graph::ancestors(self, of_impl)
-    }
-
-    /// Whether the impl set and specialization graphs are complete.
-    pub fn is_complete(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> bool {
-        tcx.populate_implementations_for_trait_if_necessary(self.def_id);
-        ty::queries::coherent_trait::try_get(tcx, DUMMY_SP, (LOCAL_CRATE, self.def_id)).is_ok()
-    }
-
-    /// If any local impls haven't been added yet, returns
-    /// Some(list of local impls for this trait).
-    fn missing_local_impls(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>)
-                           -> Option<&'gcx [ast::NodeId]> {
-        if self.flags.get().intersects(TraitFlags::HAS_LOCAL_IMPLS) {
-            return None;
-        }
-
-        if self.is_complete(tcx) {
-            self.flags.set(self.flags.get() | TraitFlags::HAS_LOCAL_IMPLS);
-            return None;
-        }
-
-        let impls = tcx.hir.trait_impls(self.def_id);
-        assert!(self.local_impl_count.get() <= impls.len());
-        if self.local_impl_count.get() == impls.len() {
-            self.flags.set(self.flags.get() | TraitFlags::HAS_LOCAL_IMPLS);
-            return None;
-        }
-
-        Some(impls)
+    pub fn ancestors(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>,
+                     of_impl: DefId)
+                     -> specialization_graph::Ancestors {
+        specialization_graph::ancestors(tcx, self.def_id, of_impl)
     }
 
     pub fn for_each_impl<F: FnMut(DefId)>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, mut f: F) {
-        self.read_trait_impls(tcx);
-        tcx.populate_implementations_for_trait_if_necessary(self.def_id);
-
-        let local_impls = self.missing_local_impls(tcx);
-        if let Some(impls) = local_impls {
-            for &id in impls {
-                f(tcx.hir.local_def_id(id));
-            }
-        }
-        let mut f = |def_id: DefId| {
-            if !(local_impls.is_some() && def_id.is_local()) {
-                f(def_id);
-            }
-        };
-
-        for &impl_def_id in self.blanket_impls.borrow().iter() {
+        for impl_def_id in tcx.trait_impls_of(self.def_id).iter() {
             f(impl_def_id);
         }
-
-        for v in self.nonblanket_impls.borrow().values() {
-            for &impl_def_id in v {
-                f(impl_def_id);
-            }
-        }
     }
 
     /// Iterate over every impl that could possibly match the
@@ -273,25 +124,6 @@ pub fn for_each_relevant_impl<F: FnMut(DefId)>(&self,
                                                    self_ty: Ty<'tcx>,
                                                    mut f: F)
     {
-        self.read_trait_impls(tcx);
-        tcx.populate_implementations_for_trait_if_necessary(self.def_id);
-
-        let local_impls = self.missing_local_impls(tcx);
-        if let Some(impls) = local_impls {
-            for &id in impls {
-                f(tcx.hir.local_def_id(id));
-            }
-        }
-        let mut f = |def_id: DefId| {
-            if !(local_impls.is_some() && def_id.is_local()) {
-                f(def_id);
-            }
-        };
-
-        for &impl_def_id in self.blanket_impls.borrow().iter() {
-            f(impl_def_id);
-        }
-
         // simplify_type(.., false) basically replaces type parameters and
         // projections with infer-variables. This is, of course, done on
         // the impl trait-ref when it is instantiated, but not on the
@@ -304,29 +136,86 @@ pub fn for_each_relevant_impl<F: FnMut(DefId)>(&self,
         // replace `S` with anything - this impl of course can't be
         // selected, and as there are hundreds of similar impls,
         // considering them would significantly harm performance.
-        if let Some(simp) = fast_reject::simplify_type(tcx, self_ty, true) {
-            if let Some(impls) = self.nonblanket_impls.borrow().get(&simp) {
-                for &impl_def_id in impls {
-                    f(impl_def_id);
-                }
-            }
+        let relevant_impls = if let Some(simplified_self_ty) =
+                fast_reject::simplify_type(tcx, self_ty, true) {
+            tcx.relevant_trait_impls_for((self.def_id, simplified_self_ty))
         } else {
-            for v in self.nonblanket_impls.borrow().values() {
-                for &impl_def_id in v {
-                    f(impl_def_id);
-                }
-            }
+            tcx.trait_impls_of(self.def_id)
+        };
+
+        for impl_def_id in relevant_impls.iter() {
+            f(impl_def_id);
         }
     }
 }
 
-bitflags! {
-    flags TraitFlags: u32 {
-        const NO_TRAIT_FLAGS        = 0,
-        const HAS_DEFAULT_IMPL      = 1 << 0,
-        const IS_OBJECT_SAFE        = 1 << 1,
-        const OBJECT_SAFETY_VALID   = 1 << 2,
-        const HAS_REMOTE_IMPLS      = 1 << 3,
-        const HAS_LOCAL_IMPLS       = 1 << 4,
+// Query provider for `trait_impls_of`.
+pub(super) fn trait_impls_of_provider<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+                                                trait_id: DefId)
+                                                -> TraitImpls {
+    let remote_impls = if trait_id.is_local() {
+        // Traits defined in the current crate can't have impls in upstream
+        // crates, so we don't bother querying the cstore.
+        Vec::new()
+    } else {
+        tcx.sess.cstore.implementations_of_trait(Some(trait_id))
+    };
+
+    let mut blanket_impls = Vec::new();
+    let mut non_blanket_impls = Vec::new();
+
+    let local_impls = tcx.hir
+                         .trait_impls(trait_id)
+                         .into_iter()
+                         .map(|&node_id| tcx.hir.local_def_id(node_id));
+
+    for impl_def_id in local_impls.chain(remote_impls.into_iter()) {
+        let impl_self_ty = tcx.type_of(impl_def_id);
+        if impl_def_id.is_local() && impl_self_ty.references_error() {
+            continue
+        }
+
+        if fast_reject::simplify_type(tcx, impl_self_ty, false).is_some() {
+            non_blanket_impls.push(impl_def_id);
+        } else {
+            blanket_impls.push(impl_def_id);
+        }
+    }
+
+    TraitImpls {
+        blanket_impls: Rc::new(blanket_impls),
+        non_blanket_impls: Rc::new(non_blanket_impls),
+    }
+}
+
+// Query provider for `relevant_trait_impls_for`.
+pub(super) fn relevant_trait_impls_provider<'a, 'tcx>(
+    tcx: TyCtxt<'a, 'tcx, 'tcx>,
+    (trait_id, self_ty): (DefId, fast_reject::SimplifiedType))
+    -> TraitImpls
+{
+    let all_trait_impls = tcx.trait_impls_of(trait_id);
+
+    let relevant: Vec<DefId> = all_trait_impls
+        .non_blanket_impls
+        .iter()
+        .cloned()
+        .filter(|&impl_def_id| {
+            let impl_self_ty = tcx.type_of(impl_def_id);
+            let impl_simple_self_ty = fast_reject::simplify_type(tcx,
+                                                                 impl_self_ty,
+                                                                 false).unwrap();
+            impl_simple_self_ty == self_ty
+        })
+        .collect();
+
+    if all_trait_impls.non_blanket_impls.len() == relevant.len() {
+        // If we didn't filter anything out, re-use the existing vec.
+        all_trait_impls
+    } else {
+        TraitImpls {
+            blanket_impls: all_trait_impls.blanket_impls.clone(),
+            non_blanket_impls: Rc::new(relevant),
+        }
     }
 }
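`TraitImplsIter` above chains two `Rc`-backed lists behind a single index and reports an exact size. A toy version of that iterator, using `u32` ids in place of `DefId`:

```rust
// Hedged sketch of the two-list iterator: walk the first shared list, then
// the second, tracking one index, and expose an exact length.
use std::rc::Rc;

struct TwoListIter {
    first: Rc<Vec<u32>>,  // stands in for the blanket impls
    second: Rc<Vec<u32>>, // stands in for the non-blanket impls
    index: usize,
}

impl Iterator for TwoListIter {
    type Item = u32;

    fn next(&mut self) -> Option<u32> {
        let item = if self.index < self.first.len() {
            Some(self.first[self.index])
        } else {
            self.second.get(self.index - self.first.len()).copied()
        };
        if item.is_some() {
            self.index += 1;
        }
        item
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        let left = self.first.len() + self.second.len() - self.index;
        (left, Some(left))
    }
}

impl ExactSizeIterator for TwoListIter {}

fn main() {
    let it = TwoListIter {
        first: Rc::new(vec![10, 11]),
        second: Rc::new(vec![20]),
        index: 0,
    };
    assert_eq!(it.len(), 3);
    assert_eq!(it.collect::<Vec<_>>(), vec![10, 11, 20]);
}
```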
index 7b5e2253109aa01c1d49d0fdc8152ed8cc618aab..17564671a1e364f45eed26442e33e6b9e8231b06 100644 (file)
@@ -116,6 +116,7 @@ pub fn record_time<T, F>(accu: &Cell<Duration>, f: F) -> T where
 }
 
 // Like std::macros::try!, but for Option<>.
+#[cfg(unix)]
 macro_rules! option_try(
     ($e:expr) => (match $e { Some(e) => e, None => return None })
 );
index 5b5113caa8e8c0dd9d68e7e8078b71991efa107e..2e949f48c175ee7e44c9126d771043367b352e30 100644 (file)
@@ -22,7 +22,6 @@ rustc_data_structures = { path = "../librustc_data_structures" }
 rustc_errors = { path = "../librustc_errors" }
 rustc_incremental = { path = "../librustc_incremental" }
 rustc_lint = { path = "../librustc_lint" }
-rustc_llvm = { path = "../librustc_llvm" }
 rustc_metadata = { path = "../librustc_metadata" }
 rustc_mir = { path = "../librustc_mir" }
 rustc_passes = { path = "../librustc_passes" }
index 8fddbe110b0e6f4616fb584ede2e9afc71b383cb..bca82ff9a46dfb803dfa7ec8947763c66ad5a468 100644 (file)
@@ -22,6 +22,7 @@
 use rustc::middle::privacy::AccessLevels;
 use rustc::mir::transform::{MIR_CONST, MIR_VALIDATED, MIR_OPTIMIZED, Passes};
 use rustc::ty::{self, TyCtxt, Resolutions, GlobalArenas};
+use rustc::traits;
 use rustc::util::common::time;
 use rustc::util::nodemap::NodeSet;
 use rustc::util::fs::rename_or_copy_remove;
@@ -699,6 +700,8 @@ pub fn phase_2_configure_and_expand<F>(sess: &Session,
 
         let krate = ecx.monotonic_expander().expand_crate(krate);
 
+        ecx.check_unused_macros();
+
         let mut missing_fragment_specifiers: Vec<_> =
             ecx.parse_sess.missing_fragment_specifiers.borrow().iter().cloned().collect();
         missing_fragment_specifiers.sort();
@@ -892,6 +895,7 @@ macro_rules! try_with_f {
     trans::provide(&mut local_providers);
     typeck::provide(&mut local_providers);
     ty::provide(&mut local_providers);
+    traits::provide(&mut local_providers);
     reachable::provide(&mut local_providers);
     rustc_const_eval::provide(&mut local_providers);
     middle::region::provide(&mut local_providers);
@@ -900,6 +904,7 @@ macro_rules! try_with_f {
     cstore::provide(&mut extern_providers);
     trans::provide(&mut extern_providers);
     ty::provide_extern(&mut extern_providers);
+    traits::provide_extern(&mut extern_providers);
     // FIXME(eddyb) get rid of this once we replace const_eval with miri.
     rustc_const_eval::provide(&mut extern_providers);
 
index 024fc546a158efe9d175f4ba7357be522cf1a739..34f636d0b9a12db2225e9c212d5c390b3e9309fc 100644 (file)
@@ -56,7 +56,6 @@
 extern crate rustc_trans;
 extern crate rustc_typeck;
 extern crate serialize;
-extern crate rustc_llvm as llvm;
 #[macro_use]
 extern crate log;
 extern crate syntax;
@@ -70,7 +69,7 @@
 use rustc_save_analysis as save;
 use rustc_save_analysis::DumpHandler;
 use rustc_trans::back::link;
-use rustc_trans::back::write::{create_target_machine, RELOC_MODEL_ARGS, CODE_GEN_MODEL_ARGS};
+use rustc_trans::back::write::{RELOC_MODEL_ARGS, CODE_GEN_MODEL_ARGS};
 use rustc::dep_graph::DepGraph;
 use rustc::session::{self, config, Session, build_session, CompileResult};
 use rustc::session::config::{Input, PrintRequest, OutputType, ErrorOutputType};
@@ -182,7 +181,7 @@ macro_rules! do_or_return {($expr: expr, $sess: expr) => {
     let (sopts, cfg) = config::build_session_options_and_crate_config(&matches);
 
     if sopts.debugging_opts.debug_llvm {
-        unsafe { llvm::LLVMRustSetDebug(1); }
+        rustc_trans::enable_llvm_debug();
     }
 
     let descriptions = diagnostics_registry();
@@ -204,13 +203,14 @@ macro_rules! do_or_return {($expr: expr, $sess: expr) => {
     };
 
     let dep_graph = DepGraph::new(sopts.build_dep_graph());
-    let cstore = Rc::new(CStore::new(&dep_graph));
+    let cstore = Rc::new(CStore::new(&dep_graph, box rustc_trans::LlvmMetadataLoader));
 
     let loader = file_loader.unwrap_or(box RealFileLoader);
     let codemap = Rc::new(CodeMap::with_file_loader(loader, sopts.file_path_mapping()));
     let mut sess = session::build_session_with_codemap(
         sopts, &dep_graph, input_file_path, descriptions, cstore.clone(), codemap, emitter_dest,
     );
+    rustc_trans::init(&sess);
     rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess));
 
     let mut cfg = config::build_configuration(&sess, cfg);
@@ -409,12 +409,13 @@ fn no_input(&mut self,
                     return None;
                 }
                 let dep_graph = DepGraph::new(sopts.build_dep_graph());
-                let cstore = Rc::new(CStore::new(&dep_graph));
+                let cstore = Rc::new(CStore::new(&dep_graph, box rustc_trans::LlvmMetadataLoader));
                 let mut sess = build_session(sopts.clone(),
                     &dep_graph,
                     None,
                     descriptions.clone(),
                     cstore.clone());
+                rustc_trans::init(&sess);
                 rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess));
                 let mut cfg = config::build_configuration(&sess, cfg.clone());
                 target_features::add_configuration(&mut cfg, &sess);
@@ -558,7 +559,11 @@ pub fn list_metadata(sess: &Session, matches: &getopts::Matches, input: &Input)
                 &Input::File(ref ifile) => {
                     let path = &(*ifile);
                     let mut v = Vec::new();
-                    locator::list_file_metadata(&sess.target.target, path, &mut v).unwrap();
+                    locator::list_file_metadata(&sess.target.target,
+                                                path,
+                                                sess.cstore.metadata_loader(),
+                                                &mut v)
+                            .unwrap();
                     println!("{}", String::from_utf8(v).unwrap());
                 }
                 &Input::Str { .. } => {
@@ -665,14 +670,6 @@ fn print_crate_info(sess: &Session,
                         println!("{}", cfg);
                     }
                 }
-                PrintRequest::TargetCPUs => {
-                    let tm = create_target_machine(sess);
-                    unsafe { llvm::LLVMRustPrintTargetCPUs(tm); }
-                }
-                PrintRequest::TargetFeatures => {
-                    let tm = create_target_machine(sess);
-                    unsafe { llvm::LLVMRustPrintTargetFeatures(tm); }
-                }
                 PrintRequest::RelocationModels => {
                     println!("Available relocation models:");
                     for &(name, _) in RELOC_MODEL_ARGS.iter() {
@@ -687,6 +684,9 @@ fn print_crate_info(sess: &Session,
                     }
                     println!("");
                 }
+                PrintRequest::TargetCPUs | PrintRequest::TargetFeatures => {
+                    rustc_trans::print(*req, sess);
+                }
             }
         }
         return Compilation::Stop;
@@ -724,10 +724,7 @@ fn unw(x: Option<&str>) -> &str {
         println!("commit-date: {}", unw(commit_date_str()));
         println!("host: {}", config::host_triple());
         println!("release: {}", unw(release_str()));
-        unsafe {
-            println!("LLVM version: {}.{}",
-                     llvm::LLVMRustVersionMajor(), llvm::LLVMRustVersionMinor());
-        }
+        rustc_trans::print_version();
     }
 }
 
@@ -1020,9 +1017,7 @@ pub fn handle_options(args: &[String]) -> Option<getopts::Matches> {
     }
 
     if cg_flags.contains(&"passes=list".to_string()) {
-        unsafe {
-            ::llvm::LLVMRustPrintPasses();
-        }
+        rustc_trans::print_passes();
         return None;
     }
 
index 61bc7c6eb4c714e81f7b1ab3283e3e261d28a4f3..bee61bb398029756511fcc1a2300c1209b9ac618 100644 (file)
@@ -9,24 +9,9 @@
 // except according to those terms.
 
 use syntax::ast;
-use llvm::LLVMRustHasFeature;
 use rustc::session::Session;
-use rustc_trans::back::write::create_target_machine;
 use syntax::symbol::Symbol;
-use libc::c_char;
-
-// WARNING: the features must be known to LLVM or the feature
-// detection code will walk past the end of the feature array,
-// leading to crashes.
-
-const ARM_WHITELIST: &'static [&'static str] = &["neon\0", "vfp2\0", "vfp3\0", "vfp4\0"];
-
-const X86_WHITELIST: &'static [&'static str] = &["avx\0", "avx2\0", "bmi\0", "bmi2\0", "sse\0",
-                                                 "sse2\0", "sse3\0", "sse4.1\0", "sse4.2\0",
-                                                 "ssse3\0", "tbm\0", "lzcnt\0", "popcnt\0",
-                                                 "sse4a\0", "rdrnd\0", "rdseed\0", "fma\0"];
-
-const HEXAGON_WHITELIST: &'static [&'static str] = &["hvx\0", "hvx-double\0"];
+use rustc_trans;
 
 /// Add `target_feature = "..."` cfgs for a variety of platform
 /// specific features (SSE, NEON etc.).
 /// This is performed by checking whether a whitelisted set of
 /// features is available on the target machine, by querying LLVM.
 pub fn add_configuration(cfg: &mut ast::CrateConfig, sess: &Session) {
-    let target_machine = create_target_machine(sess);
-
-    let whitelist = match &*sess.target.target.arch {
-        "arm" => ARM_WHITELIST,
-        "x86" | "x86_64" => X86_WHITELIST,
-        "hexagon" => HEXAGON_WHITELIST,
-        _ => &[],
-    };
-
     let tf = Symbol::intern("target_feature");
-    for feat in whitelist {
-        assert_eq!(feat.chars().last(), Some('\0'));
-        if unsafe { LLVMRustHasFeature(target_machine, feat.as_ptr() as *const c_char) } {
-            cfg.insert((tf, Some(Symbol::intern(&feat[..feat.len() - 1]))));
-        }
+
+    for feat in rustc_trans::target_features(sess) {
+        cfg.insert((tf, Some(feat)));
     }
 
     let requested_features = sess.opts.cg.target_feature.split(',');
index e2cbc480715fb66d590be7a5311b9a52e9bf3277..1d236a96bf62ed97c313cfad34909734a9def2aa 100644 (file)
@@ -14,6 +14,7 @@
 use rustc::dep_graph::DepGraph;
 use rustc_lint;
 use rustc_resolve::MakeGlobMap;
+use rustc_trans;
 use rustc::middle::lang_items;
 use rustc::middle::free_region::FreeRegionMap;
 use rustc::middle::region::{CodeExtent, RegionMaps};
@@ -104,13 +105,14 @@ fn test_env<F>(source_string: &str,
 
     let dep_graph = DepGraph::new(false);
     let _ignore = dep_graph.in_ignore();
-    let cstore = Rc::new(CStore::new(&dep_graph));
+    let cstore = Rc::new(CStore::new(&dep_graph, box rustc_trans::LlvmMetadataLoader));
     let sess = session::build_session_(options,
                                        &dep_graph,
                                        None,
                                        diagnostic_handler,
                                        Rc::new(CodeMap::new(FilePathMapping::empty())),
                                        cstore.clone());
+    rustc_trans::init(&sess);
     rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess));
     let input = config::Input::Str {
         name: driver::anon_src(),
index 6f5cc1f3f45c87d0384dd439325e4a8453fb4fb0..8cdabc1d894e229af7070f87337312e11c5fda0b 100644 (file)
 use rustc::hir::itemlikevisit::ItemLikeVisitor;
 use rustc::ich::{Fingerprint, StableHashingContext};
 use rustc::ty::TyCtxt;
+use rustc::util::common::record_time;
 use rustc_data_structures::stable_hasher::{StableHasher, HashStable};
 use rustc_data_structures::fx::FxHashMap;
-use rustc::util::common::record_time;
+use rustc_data_structures::accumulate_vec::AccumulateVec;
 
 pub type IchHasher = StableHasher<Fingerprint>;
 
@@ -159,6 +160,11 @@ fn compute_crate_hash(&mut self) {
                                         // difference, filter them out.
                                         return None
                                     }
+                                    DepNode::AllLocalTraitImpls => {
+                                        // These are already covered by hashing
+                                        // the HIR.
+                                        return None
+                                    }
                                     ref other => {
                                         bug!("Found unexpected DepNode during \
                                               SVH computation: {:?}",
@@ -213,6 +219,49 @@ fn hash_crate_root_module(&mut self, krate: &'tcx hir::Crate) {
                                                  true,
                                                  (module, (span, attrs)));
     }
+
+    fn compute_and_store_ich_for_trait_impls(&mut self, krate: &'tcx hir::Crate)
+    {
+        let tcx = self.hcx.tcx();
+
+        let mut impls: Vec<(u64, Fingerprint)> = krate
+            .trait_impls
+            .iter()
+            .map(|(&trait_id, impls)| {
+                let trait_id = tcx.def_path_hash(trait_id);
+                let mut impls: AccumulateVec<[_; 32]> = impls
+                    .iter()
+                    .map(|&node_id| {
+                        let def_id = tcx.hir.local_def_id(node_id);
+                        tcx.def_path_hash(def_id)
+                    })
+                    .collect();
+
+                impls.sort_unstable();
+                let mut hasher = StableHasher::new();
+                impls.hash_stable(&mut self.hcx, &mut hasher);
+                (trait_id, hasher.finish())
+            })
+            .collect();
+
+        impls.sort_unstable();
+
+        let mut default_impls: AccumulateVec<[_; 32]> = krate
+            .trait_default_impl
+            .iter()
+            .map(|(&trait_def_id, &impl_node_id)| {
+                let impl_def_id = tcx.hir.local_def_id(impl_node_id);
+                (tcx.def_path_hash(trait_def_id), tcx.def_path_hash(impl_def_id))
+            })
+            .collect();
+
+        default_impls.sort_unstable();
+
+        let mut hasher = StableHasher::new();
+        impls.hash_stable(&mut self.hcx, &mut hasher);
+
+        self.hashes.insert(DepNode::AllLocalTraitImpls, hasher.finish());
+    }
 }
 
 impl<'a, 'tcx: 'a> ItemLikeVisitor<'tcx> for ComputeItemHashesVisitor<'a, 'tcx> {
@@ -235,6 +284,8 @@ fn visit_impl_item(&mut self, item: &'tcx hir::ImplItem) {
     }
 }
 
+
+
 pub fn compute_incremental_hashes_map<'a, 'tcx: 'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>)
                                                     -> IncrementalHashesMap {
     let _ignore = tcx.dep_graph.in_ignore();
@@ -272,6 +323,8 @@ pub fn compute_incremental_hashes_map<'a, 'tcx: 'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>)
             let fingerprint = hasher.finish();
             visitor.hashes.insert(dep_node, fingerprint);
         }
+
+        visitor.compute_and_store_ich_for_trait_impls(krate);
     });
 
     tcx.sess.perf_stats.incr_comp_hashes_count.set(visitor.hashes.len() as u64);
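
The new `compute_and_store_ich_for_trait_impls` sorts by def path hash before hashing so that `HashMap`/`NodeId` iteration order cannot leak into the SVH. A minimal standalone sketch of that sort-then-hash pattern, using `std`'s `DefaultHasher` as a stand-in for `StableHasher`:

```rust
use std::collections::HashMap;
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

// Project the map to a sorted Vec of entries first, so the fingerprint is
// independent of HashMap iteration order (the role def path hashes play above).
fn stable_hash_of_map(map: &HashMap<String, u32>) -> u64 {
    let mut entries: Vec<(&String, &u32)> = map.iter().collect();
    entries.sort_unstable();
    let mut hasher = DefaultHasher::new(); // stand-in for StableHasher
    entries.hash(&mut hasher);
    hasher.finish()
}

fn main() {
    let mut m = HashMap::new();
    m.insert("Clone".to_string(), 2);
    m.insert("Debug".to_string(), 7);
    assert_eq!(stable_hash_of_map(&m), stable_hash_of_map(&m.clone()));
}
```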
index 31b30d2b2857cb253e97e8a1a94b581f8ae52ae2..044b143e306250944542e6b7fb12b3c73a090a21 100644 (file)
@@ -37,14 +37,3 @@ macro_rules! graph {
         }
     }
 }
-
-macro_rules! set {
-    ($( $value:expr ),*) => {
-        {
-            use $crate::rustc_data_structures::fx::FxHashSet;
-            let mut set = FxHashSet();
-            $(set.insert($value);)*
-            set
-        }
-    }
-}
index 2d0b5a6a51c6ba20baf5da358614c4930e847444..479c7206cb4cb95b150c5b0adcb0b40daf86c0de 100644 (file)
@@ -171,7 +171,8 @@ macro_rules! add_lint_group {
                     UNUSED_MUST_USE,
                     UNUSED_UNSAFE,
                     PATH_STATEMENTS,
-                    UNUSED_ATTRIBUTES);
+                    UNUSED_ATTRIBUTES,
+                    UNUSED_MACROS);
 
     // Guidelines for creating a future incompatibility lint:
     //
index e8b906092730e5cbee6f873daa302fd57eaf46a6..f47788ee036dc5e312d4a05754a3f4b0cd8c7143 100644 (file)
@@ -11,13 +11,13 @@ crate-type = ["dylib"]
 [dependencies]
 flate = { path = "../libflate" }
 log = "0.3"
+owning_ref = "0.3.3"
 proc_macro = { path = "../libproc_macro" }
 rustc = { path = "../librustc" }
 rustc_back = { path = "../librustc_back" }
 rustc_const_math = { path = "../librustc_const_math" }
 rustc_data_structures = { path = "../librustc_data_structures" }
 rustc_errors = { path = "../librustc_errors" }
-rustc_llvm = { path = "../librustc_llvm" }
 serialize = { path = "../libserialize" }
 syntax = { path = "../libsyntax" }
 syntax_ext = { path = "../libsyntax_ext" }
index d2874f16289015afb8095c96111d203aac3745fa..ec6947b4a486c31ebcaa1444fc827d2115c72f04 100644 (file)
@@ -315,11 +315,20 @@ fn register_crate(&mut self,
         let exported_symbols = crate_root.exported_symbols
                                          .map(|x| x.decode(&metadata).collect());
 
+        let trait_impls = crate_root
+            .impls
+            .map(|impls| {
+                impls.decode(&metadata)
+                     .map(|trait_impls| (trait_impls.trait_id, trait_impls.impls))
+                     .collect()
+            });
+
         let mut cmeta = cstore::CrateMetadata {
             name: name,
             extern_crate: Cell::new(None),
             def_path_table: def_path_table,
             exported_symbols: exported_symbols,
+            trait_impls: trait_impls,
             proc_macros: crate_root.macro_derive_registrar.map(|_| {
                 self.load_derive_macros(&crate_root, dylib.clone().map(|p| p.0), span)
             }),
@@ -393,6 +402,7 @@ fn resolve_crate(&mut self,
                 rejected_via_filename: vec![],
                 should_match_name: true,
                 is_proc_macro: Some(false),
+                metadata_loader: &*self.cstore.metadata_loader,
             };
 
             self.load(&mut locate_ctxt).or_else(|| {
@@ -554,6 +564,7 @@ fn read_extension_crate(&mut self, span: Span, info: &ExternCrateInfo) -> Extens
             rejected_via_filename: vec![],
             should_match_name: true,
             is_proc_macro: None,
+            metadata_loader: &*self.cstore.metadata_loader,
         };
         let library = self.load(&mut locate_ctxt).or_else(|| {
             if !is_cross {
index c12b4209675dee9847304591dc66ea6c503834b3..64fccb0314e97d67743a613a58406568dbcde43f 100644 (file)
 // The crate store - a central repo for information collected about external
 // crates and libraries
 
-use locator;
 use schema::{self, Tracked};
 
 use rustc::dep_graph::{DepGraph, DepNode, GlobalMetaDataKind};
 use rustc::hir::def_id::{CRATE_DEF_INDEX, LOCAL_CRATE, CrateNum, DefIndex, DefId};
 use rustc::hir::map::definitions::DefPathTable;
 use rustc::hir::svh::Svh;
-use rustc::middle::cstore::{DepKind, ExternCrate};
+use rustc::middle::cstore::{DepKind, ExternCrate, MetadataLoader};
 use rustc_back::PanicStrategy;
 use rustc_data_structures::indexed_vec::IndexVec;
 use rustc::util::nodemap::{FxHashMap, FxHashSet, NodeMap, DefIdMap};
 
 use std::cell::{RefCell, Cell};
 use std::rc::Rc;
-use flate::Bytes;
+use owning_ref::ErasedBoxRef;
 use syntax::{ast, attr};
 use syntax::ext::base::SyntaxExtension;
 use syntax::symbol::Symbol;
 // own crate numbers.
 pub type CrateNumMap = IndexVec<CrateNum, CrateNum>;
 
-pub enum MetadataBlob {
-    Inflated(Bytes),
-    Archive(locator::ArchiveMetadata),
-    Raw(Vec<u8>),
-}
+pub struct MetadataBlob(pub ErasedBoxRef<[u8]>);
 
 /// Holds information about a syntax_pos::FileMap imported from another crate.
 /// See `imported_filemaps()` for more information.
@@ -85,6 +80,8 @@ pub struct CrateMetadata {
 
     pub exported_symbols: Tracked<FxHashSet<DefIndex>>,
 
+    pub trait_impls: Tracked<FxHashMap<(u32, DefIndex), schema::LazySeq<DefIndex>>>,
+
     pub dep_kind: Cell<DepKind>,
     pub source: CrateSource,
 
@@ -103,10 +100,11 @@ pub struct CStore {
     statically_included_foreign_items: RefCell<FxHashSet<DefIndex>>,
     pub dllimport_foreign_items: RefCell<FxHashSet<DefIndex>>,
     pub visible_parent_map: RefCell<DefIdMap<DefId>>,
+    pub metadata_loader: Box<MetadataLoader>,
 }
 
 impl CStore {
-    pub fn new(dep_graph: &DepGraph) -> CStore {
+    pub fn new(dep_graph: &DepGraph, metadata_loader: Box<MetadataLoader>) -> CStore {
         CStore {
             dep_graph: dep_graph.clone(),
             metas: RefCell::new(FxHashMap()),
@@ -116,6 +114,7 @@ pub fn new(dep_graph: &DepGraph) -> CStore {
             statically_included_foreign_items: RefCell::new(FxHashSet()),
             dllimport_foreign_items: RefCell::new(FxHashSet()),
             visible_parent_map: RefCell::new(FxHashMap()),
+            metadata_loader: metadata_loader,
         }
     }
 
index dbf3e94832fc4fd724a010fe841d440243c406b0..4b7083590d24af70af5fe809b12a06351564f494 100644 (file)
 
 use cstore;
 use encoder;
-use locator;
 use schema;
 
 use rustc::dep_graph::DepTrackingMapConfig;
 use rustc::middle::cstore::{CrateStore, CrateSource, LibSource, DepKind,
-                            ExternCrate, NativeLibrary, LinkMeta,
+                            ExternCrate, NativeLibrary, MetadataLoader, LinkMeta,
                             LinkagePreference, LoadedMacro, EncodedMetadata};
 use rustc::hir::def;
 use rustc::middle::lang_items;
@@ -38,7 +37,6 @@
 use syntax::symbol::Symbol;
 use syntax_pos::{Span, NO_EXPANSION};
 use rustc::hir::svh::Svh;
-use rustc_back::target::Target;
 use rustc::hir;
 
 macro_rules! provide {
@@ -135,6 +133,10 @@ fn crate_data_as_rc_any(&self, krate: CrateNum) -> Rc<Any> {
         self.get_crate_data(krate)
     }
 
+    fn metadata_loader(&self) -> &MetadataLoader {
+        &*self.metadata_loader
+    }
+
     fn visibility(&self, def: DefId) -> ty::Visibility {
         self.dep_graph.read(DepNode::MetaData(def));
         self.get_crate_data(def.krate).get_visibility(def.index)
@@ -147,10 +149,8 @@ fn item_generics_cloned(&self, def: DefId) -> ty::Generics {
 
     fn implementations_of_trait(&self, filter: Option<DefId>) -> Vec<DefId>
     {
-        if let Some(def_id) = filter {
-            self.dep_graph.read(DepNode::MetaData(def_id));
-        }
         let mut result = vec![];
+
         self.iter_crate_data(|_, cdata| {
             cdata.get_implementations_for_trait(filter, &self.dep_graph, &mut result)
         });
@@ -420,17 +420,6 @@ fn used_link_args(&self) -> Vec<String>
     {
         self.get_used_link_args().borrow().clone()
     }
-
-    fn metadata_filename(&self) -> &str
-    {
-        locator::METADATA_FILENAME
-    }
-
-    fn metadata_section_name(&self, target: &Target) -> &str
-    {
-        locator::meta_section_name(target)
-    }
-
     fn used_crates(&self, prefer: LinkagePreference) -> Vec<(CrateNum, LibSource)>
     {
         self.do_get_used_crates(prefer)
@@ -522,4 +511,4 @@ fn visible_parent_map<'a>(&'a self) -> ::std::cell::Ref<'a, DefIdMap<DefId>> {
         drop(visible_parent_map);
         self.visible_parent_map.borrow()
     }
-}
+}
\ No newline at end of file
index 819095e262832b207b9f689014c2e805cbc455b4..754f27810c453962e405463e3a83317896382c38 100644 (file)
@@ -77,11 +77,7 @@ fn decoder(self, pos: usize) -> DecodeContext<'a, 'tcx> {
 
 impl<'a, 'tcx> Metadata<'a, 'tcx> for &'a MetadataBlob {
     fn raw_bytes(self) -> &'a [u8] {
-        match *self {
-            MetadataBlob::Inflated(ref vec) => vec,
-            MetadataBlob::Archive(ref ar) => ar.as_slice(),
-            MetadataBlob::Raw(ref vec) => vec,
-        }
+        &self.0
     }
 }
 
@@ -505,16 +501,11 @@ pub fn get_trait_def(&self, item_id: DefIndex) -> ty::TraitDef {
             _ => bug!(),
         };
 
-        let def = ty::TraitDef::new(self.local_def_id(item_id),
-                                    data.unsafety,
-                                    data.paren_sugar,
-                                    self.def_path_table.def_path_hash(item_id));
-
-        if data.has_default_impl {
-            def.record_has_default_impl();
-        }
-
-        def
+        ty::TraitDef::new(self.local_def_id(item_id),
+                          data.unsafety,
+                          data.paren_sugar,
+                          data.has_default_impl,
+                          self.def_path_table.def_path_hash(item_id))
     }
 
     fn get_variant(&self, item: &Entry, index: DefIndex) -> ty::VariantDef {
@@ -961,17 +952,17 @@ pub fn get_implementations_for_trait(&self,
             None => None,
         };
 
-        // FIXME(eddyb) Make this O(1) instead of O(n).
         let dep_node = self.metadata_dep_node(GlobalMetaDataKind::Impls);
-        for trait_impls in self.root.impls.get(dep_graph, dep_node).decode(self) {
-            if filter.is_some() && filter != Some(trait_impls.trait_id) {
-                continue;
-            }
-
-            result.extend(trait_impls.impls.decode(self).map(|index| self.local_def_id(index)));
 
-            if filter.is_some() {
-                break;
+        if let Some(filter) = filter {
+            if let Some(impls) = self.trait_impls
+                                     .get(dep_graph, dep_node)
+                                     .get(&filter) {
+                result.extend(impls.decode(self).map(|idx| self.local_def_id(idx)));
+            }
+        } else {
+            for impls in self.trait_impls.get(dep_graph, dep_node).values() {
+                result.extend(impls.decode(self).map(|idx| self.local_def_id(idx)));
             }
         }
     }
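
With `trait_impls` now stored as a per-trait map, the filtered case above becomes a single lookup instead of decoding every `TraitImpls` entry. A rough sketch of the lookup shape, with `u32` ids standing in for `DefIndex`:

```rust
use std::collections::HashMap;

// `index` plays the role of CrateMetadata::trait_impls above.
fn implementations_of(index: &HashMap<u32, Vec<u32>>, filter: Option<u32>) -> Vec<u32> {
    match filter {
        // Filtered query: one bucket lookup rather than a scan over all traits.
        Some(trait_id) => index.get(&trait_id).cloned().unwrap_or_default(),
        // Unfiltered query: still has to walk everything.
        None => index.values().flat_map(|v| v.iter().cloned()).collect(),
    }
}

fn main() {
    let mut index = HashMap::new();
    index.insert(1, vec![10, 11]);
    assert_eq!(implementations_of(&index, Some(1)), vec![10, 11]);
    assert!(implementations_of(&index, Some(2)).is_empty());
}
```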
index fa4ebed16189c7ab2cb81defdb02eeef3a4068be..93fcdc455e5dd3762dc7dfe7d6f78e13c4e66ea0 100644 (file)
@@ -943,7 +943,7 @@ fn encode_info_for_item(&mut self, (def_id, item): (DefId, &'tcx hir::Item)) ->
                 let trait_ref = tcx.impl_trait_ref(def_id);
                 let parent = if let Some(trait_ref) = trait_ref {
                     let trait_def = tcx.trait_def(trait_ref.def_id);
-                    trait_def.ancestors(def_id).skip(1).next().and_then(|node| {
+                    trait_def.ancestors(tcx, def_id).skip(1).next().and_then(|node| {
                         match node {
                             specialization_graph::Node::Impl(parent) => Some(parent),
                             _ => None,
@@ -1295,23 +1295,37 @@ fn encode_lang_items_missing(&mut self, _: ()) -> LazySeq<lang_items::LangItem>
 
     /// Encodes an index, mapping each trait to its (local) implementations.
     fn encode_impls(&mut self, _: ()) -> LazySeq<TraitImpls> {
+        debug!("IsolatedEncoder::encode_impls()");
+        let tcx = self.tcx;
         let mut visitor = ImplVisitor {
-            tcx: self.tcx,
+            tcx: tcx,
             impls: FxHashMap(),
         };
-        self.tcx.hir.krate().visit_all_item_likes(&mut visitor);
+        tcx.hir.krate().visit_all_item_likes(&mut visitor);
+
+        let mut all_impls: Vec<_> = visitor.impls.into_iter().collect();
 
-        let all_impls: Vec<_> = visitor.impls
+        // Bring everything into deterministic order for hashing
+        all_impls.sort_unstable_by_key(|&(trait_def_id, _)| {
+            tcx.def_path_hash(trait_def_id)
+        });
+
+        let all_impls: Vec<_> = all_impls
             .into_iter()
-            .map(|(trait_def_id, impls)| {
+            .map(|(trait_def_id, mut impls)| {
+                // Bring everything into deterministic order for hashing
+                impls.sort_unstable_by_key(|&def_index| {
+                    tcx.hir.definitions().def_path_hash(def_index)
+                });
+
                 TraitImpls {
                     trait_id: (trait_def_id.krate.as_u32(), trait_def_id.index),
-                    impls: self.lazy_seq(impls),
+                    impls: self.lazy_seq_from_slice(&impls[..]),
                 }
             })
             .collect();
 
-        self.lazy_seq(all_impls)
+        self.lazy_seq_from_slice(&all_impls[..])
     }
 
     // Encodes all symbols exported from this crate into the metadata.
index 27555f49e57fc1afc0991cc53c81ae1cb1eca9a0..56c150fd4c82f27608f2c6bc57da9df72c08762a 100644 (file)
@@ -29,6 +29,7 @@
 
 #![cfg_attr(stage0, unstable(feature = "rustc_private", issue = "27812"))]
 #![cfg_attr(stage0, feature(staged_api))]
+#![feature(sort_unstable)]
 
 #[macro_use]
 extern crate log;
@@ -37,6 +38,7 @@
 extern crate syntax_pos;
 extern crate flate;
 extern crate serialize as rustc_serialize; // used by deriving
+extern crate owning_ref;
 extern crate rustc_errors as errors;
 extern crate syntax_ext;
 extern crate proc_macro;
@@ -46,7 +48,6 @@
 extern crate rustc_back;
 extern crate rustc_const_math;
 extern crate rustc_data_structures;
-extern crate rustc_llvm;
 
 mod diagnostics;
 
index 84bb82de370e4f55f5e4635060d8ccde38fa4dc9..34b07af9f01f439cf84e551c5ff5cd629b6bcf98 100644 (file)
 use schema::{METADATA_HEADER, rustc_version};
 
 use rustc::hir::svh::Svh;
+use rustc::middle::cstore::MetadataLoader;
 use rustc::session::{config, Session};
 use rustc::session::filesearch::{FileSearch, FileMatches, FileDoesntMatch};
 use rustc::session::search_paths::PathKind;
-use rustc::util::common;
 use rustc::util::nodemap::FxHashMap;
 
-use rustc_llvm as llvm;
-use rustc_llvm::{False, ObjectFile, mk_section_iter};
-use rustc_llvm::archive_ro::ArchiveRO;
 use errors::DiagnosticBuilder;
 use syntax::symbol::Symbol;
 use syntax_pos::Span;
 use std::fs::{self, File};
 use std::io::{self, Read};
 use std::path::{Path, PathBuf};
-use std::ptr;
-use std::slice;
 use std::time::Instant;
 
 use flate;
+use owning_ref::{ErasedBoxRef, OwningRef};
 
 pub struct CrateMismatch {
     path: PathBuf,
@@ -272,12 +268,7 @@ pub struct Context<'a> {
     pub rejected_via_filename: Vec<CrateMismatch>,
     pub should_match_name: bool,
     pub is_proc_macro: Option<bool>,
-}
-
-pub struct ArchiveMetadata {
-    _archive: ArchiveRO,
-    // points into self._archive
-    data: *const [u8],
+    pub metadata_loader: &'a MetadataLoader,
 }
 
 pub struct CratePaths {
@@ -287,8 +278,6 @@ pub struct CratePaths {
     pub rmeta: Option<PathBuf>,
 }
 
-pub const METADATA_FILENAME: &'static str = "rust.metadata.bin";
-
 #[derive(Copy, Clone, PartialEq)]
 enum CrateFlavor {
     Rlib,
@@ -596,20 +585,21 @@ fn extract_one(&mut self,
         let mut err: Option<DiagnosticBuilder> = None;
         for (lib, kind) in m {
             info!("{} reading metadata from: {}", flavor, lib.display());
-            let (hash, metadata) = match get_metadata_section(self.target, flavor, &lib) {
-                Ok(blob) => {
-                    if let Some(h) = self.crate_matches(&blob, &lib) {
-                        (h, blob)
-                    } else {
-                        info!("metadata mismatch");
+            let (hash, metadata) =
+                match get_metadata_section(self.target, flavor, &lib, self.metadata_loader) {
+                    Ok(blob) => {
+                        if let Some(h) = self.crate_matches(&blob, &lib) {
+                            (h, blob)
+                        } else {
+                            info!("metadata mismatch");
+                            continue;
+                        }
+                    }
+                    Err(err) => {
+                        info!("no metadata found: {}", err);
                         continue;
                     }
-                }
-                Err(err) => {
-                    info!("no metadata found: {}", err);
-                    continue;
-                }
-            };
+                };
             // If we see multiple hashes, emit an error about duplicate candidates.
             if slot.as_ref().map_or(false, |s| s.0 != hash) {
                 let mut e = struct_span_err!(self.sess,
@@ -833,50 +823,14 @@ pub fn note_crate_name(err: &mut DiagnosticBuilder, name: &str) {
     err.note(&format!("crate name: {}", name));
 }
 
-impl ArchiveMetadata {
-    fn new(ar: ArchiveRO) -> Option<ArchiveMetadata> {
-        let data = {
-            let section = ar.iter()
-                .filter_map(|s| s.ok())
-                .find(|sect| sect.name() == Some(METADATA_FILENAME));
-            match section {
-                Some(s) => s.data() as *const [u8],
-                None => {
-                    debug!("didn't find '{}' in the archive", METADATA_FILENAME);
-                    return None;
-                }
-            }
-        };
-
-        Some(ArchiveMetadata {
-            _archive: ar,
-            data: data,
-        })
-    }
-
-    pub fn as_slice<'a>(&'a self) -> &'a [u8] {
-        unsafe { &*self.data }
-    }
-}
-
-fn verify_decompressed_encoding_version(blob: &MetadataBlob,
-                                        filename: &Path)
-                                        -> Result<(), String> {
-    if !blob.is_compatible() {
-        Err((format!("incompatible metadata version found: '{}'",
-                     filename.display())))
-    } else {
-        Ok(())
-    }
-}
-
 // Just a small wrapper to time how long reading metadata takes.
 fn get_metadata_section(target: &Target,
                         flavor: CrateFlavor,
-                        filename: &Path)
+                        filename: &Path,
+                        loader: &MetadataLoader)
                         -> Result<MetadataBlob, String> {
     let start = Instant::now();
-    let ret = get_metadata_section_imp(target, flavor, filename);
+    let ret = get_metadata_section_imp(target, flavor, filename, loader);
     info!("reading {:?} => {:?}",
           filename.file_name().unwrap(),
           start.elapsed());
@@ -885,118 +839,61 @@ fn get_metadata_section(target: &Target,
 
 fn get_metadata_section_imp(target: &Target,
                             flavor: CrateFlavor,
-                            filename: &Path)
+                            filename: &Path,
+                            loader: &MetadataLoader)
                             -> Result<MetadataBlob, String> {
     if !filename.exists() {
         return Err(format!("no such file: '{}'", filename.display()));
     }
-    if flavor == CrateFlavor::Rlib {
-        // Use ArchiveRO for speed here, it's backed by LLVM and uses mmap
-        // internally to read the file. We also avoid even using a memcpy by
-        // just keeping the archive along while the metadata is in use.
-        let archive = match ArchiveRO::open(filename) {
-            Some(ar) => ar,
-            None => {
-                debug!("llvm didn't like `{}`", filename.display());
-                return Err(format!("failed to read rlib metadata: '{}'", filename.display()));
+    let raw_bytes: ErasedBoxRef<[u8]> = match flavor {
+        CrateFlavor::Rlib => loader.get_rlib_metadata(target, filename)?,
+        CrateFlavor::Dylib => {
+            let buf = loader.get_dylib_metadata(target, filename)?;
+            // The header is uncompressed
+            let header_len = METADATA_HEADER.len();
+            debug!("checking {} bytes of metadata-version stamp", header_len);
+            let header = &buf[..cmp::min(header_len, buf.len())];
+            if header != METADATA_HEADER {
+                return Err(format!("incompatible metadata version found: '{}'",
+                                   filename.display()));
             }
-        };
-        return match ArchiveMetadata::new(archive).map(|ar| MetadataBlob::Archive(ar)) {
-            None => Err(format!("failed to read rlib metadata: '{}'", filename.display())),
-            Some(blob) => {
-                verify_decompressed_encoding_version(&blob, filename)?;
-                Ok(blob)
-            }
-        };
-    } else if flavor == CrateFlavor::Rmeta {
-        let mut file = File::open(filename).map_err(|_|
-            format!("could not open file: '{}'", filename.display()))?;
-        let mut buf = vec![];
-        file.read_to_end(&mut buf).map_err(|_|
-            format!("failed to read rlib metadata: '{}'", filename.display()))?;
-        let blob = MetadataBlob::Raw(buf);
-        verify_decompressed_encoding_version(&blob, filename)?;
-        return Ok(blob);
-    }
-    unsafe {
-        let buf = common::path2cstr(filename);
-        let mb = llvm::LLVMRustCreateMemoryBufferWithContentsOfFile(buf.as_ptr());
-        if mb as isize == 0 {
-            return Err(format!("error reading library: '{}'", filename.display()));
-        }
-        let of = match ObjectFile::new(mb) {
-            Some(of) => of,
-            _ => {
-                return Err((format!("provided path not an object file: '{}'", filename.display())))
-            }
-        };
-        let si = mk_section_iter(of.llof);
-        while llvm::LLVMIsSectionIteratorAtEnd(of.llof, si.llsi) == False {
-            let mut name_buf = ptr::null();
-            let name_len = llvm::LLVMRustGetSectionName(si.llsi, &mut name_buf);
-            let name = slice::from_raw_parts(name_buf as *const u8, name_len as usize).to_vec();
-            let name = String::from_utf8(name).unwrap();
-            debug!("get_metadata_section: name {}", name);
-            if read_meta_section_name(target) == name {
-                let cbuf = llvm::LLVMGetSectionContents(si.llsi);
-                let csz = llvm::LLVMGetSectionSize(si.llsi) as usize;
-                let cvbuf: *const u8 = cbuf as *const u8;
-                let vlen = METADATA_HEADER.len();
-                debug!("checking {} bytes of metadata-version stamp", vlen);
-                let minsz = cmp::min(vlen, csz);
-                let buf0 = slice::from_raw_parts(cvbuf, minsz);
-                let version_ok = buf0 == METADATA_HEADER;
-                if !version_ok {
-                    return Err((format!("incompatible metadata version found: '{}'",
-                                        filename.display())));
-                }
 
-                let cvbuf1 = cvbuf.offset(vlen as isize);
-                debug!("inflating {} bytes of compressed metadata", csz - vlen);
-                let bytes = slice::from_raw_parts(cvbuf1, csz - vlen);
-                match flate::inflate_bytes(bytes) {
-                    Ok(inflated) => {
-                        let blob = MetadataBlob::Inflated(inflated);
-                        verify_decompressed_encoding_version(&blob, filename)?;
-                        return Ok(blob);
-                    }
-                    Err(_) => {}
+            // Header is okay -> inflate the actual metadata
+            let compressed_bytes = &buf[header_len..];
+            debug!("inflating {} bytes of compressed metadata", compressed_bytes.len());
+            match flate::inflate_bytes(compressed_bytes) {
+                Ok(inflated) => {
+                    let buf = unsafe { OwningRef::new_assert_stable_address(inflated) };
+                    buf.map_owner_box().erase_owner()
+                }
+                Err(_) => {
+                    return Err(format!("failed to decompress metadata: {}", filename.display()));
                 }
             }
-            llvm::LLVMMoveToNextSection(si.llsi);
         }
-        Err(format!("metadata not found: '{}'", filename.display()))
-    }
-}
-
-pub fn meta_section_name(target: &Target) -> &'static str {
-    // Historical note:
-    //
-    // When using link.exe it was seen that the section name `.note.rustc`
-    // was getting shortened to `.note.ru`, and according to the PE and COFF
-    // specification:
-    //
-    // > Executable images do not use a string table and do not support
-    // > section names longer than 8 characters
-    //
-    // https://msdn.microsoft.com/en-us/library/windows/hardware/gg463119.aspx
-    //
-    // As a result, we choose a slightly shorter name! As to why
-    // `.note.rustc` works on MinGW, that's another good question...
-
-    if target.options.is_like_osx {
-        "__DATA,.rustc"
+        CrateFlavor::Rmeta => {
+            let mut file = File::open(filename).map_err(|_|
+                format!("could not open file: '{}'", filename.display()))?;
+            let mut buf = vec![];
+            file.read_to_end(&mut buf).map_err(|_|
+                format!("failed to read rmeta metadata: '{}'", filename.display()))?;
+            OwningRef::new(buf).map_owner_box().erase_owner()
+        }
+    };
+    let blob = MetadataBlob(raw_bytes);
+    if blob.is_compatible() {
+        Ok(blob)
     } else {
-        ".rustc"
+        Err(format!("incompatible metadata version found: '{}'", filename.display()))
     }
 }
 
-pub fn read_meta_section_name(_target: &Target) -> &'static str {
-    ".rustc"
-}
-
 // A diagnostic function for dumping crate metadata to an output stream
-pub fn list_file_metadata(target: &Target, path: &Path, out: &mut io::Write) -> io::Result<()> {
+pub fn list_file_metadata(target: &Target,
+                          path: &Path,
+                          loader: &MetadataLoader,
+                          out: &mut io::Write)
+                          -> io::Result<()> {
     let filename = path.file_name().unwrap().to_str().unwrap();
     let flavor = if filename.ends_with(".rlib") {
         CrateFlavor::Rlib
@@ -1005,7 +902,7 @@ pub fn list_file_metadata(target: &Target, path: &Path, out: &mut io::Write) ->
     } else {
         CrateFlavor::Dylib
     };
-    match get_metadata_section(target, flavor, path) {
+    match get_metadata_section(target, flavor, path, loader) {
         Ok(metadata) => metadata.list_crate_metadata(out),
         Err(msg) => write!(out, "{}\n", msg),
     }
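
The locator now receives an `ErasedBoxRef<[u8]>` from the `MetadataLoader`, so `MetadataBlob` no longer needs to know whether the bytes came from an rlib archive, a dylib section, or a plain rmeta file. A standalone sketch of the `owning_ref` 0.3 pattern used above (`OwningRef::new(..).map_owner_box().erase_owner()`), here with a `Vec<u8>` owner as in the Rmeta branch:

```rust
extern crate owning_ref; // owning_ref = "0.3.3", as added to Cargo.toml above

use owning_ref::{ErasedBoxRef, OwningRef};

// Keep the owner alive in a Box while handing out a type-erased
// reference to its bytes.
fn bytes_from_vec(data: Vec<u8>) -> ErasedBoxRef<[u8]> {
    OwningRef::new(data)  // OwningRef<Vec<u8>, [u8]>
        .map_owner_box()  // move the Vec behind a Box owner
        .erase_owner()    // forget the concrete owner type
}

fn main() {
    let blob = bytes_from_vec(vec![0x72, 0x75, 0x73, 0x74]);
    assert_eq!(&blob[..], &b"rust"[..]);
}
```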
index 5abe1adfb6f359ff9478af0ec7cc5d670cff4757..91a22d922199da6d2e6f3e35192ec9957a9491e1 100644 (file)
@@ -221,6 +221,20 @@ pub fn map<F, R>(&self, f: F) -> Tracked<R>
     }
 }
 
+impl<'a, 'tcx, T> HashStable<StableHashingContext<'a, 'tcx>> for Tracked<T>
+    where T: HashStable<StableHashingContext<'a, 'tcx>>
+{
+    fn hash_stable<W: StableHasherResult>(&self,
+                                          hcx: &mut StableHashingContext<'a, 'tcx>,
+                                          hasher: &mut StableHasher<W>) {
+        let Tracked {
+            ref state
+        } = *self;
+
+        state.hash_stable(hcx, hasher);
+    }
+}
+
 
 #[derive(RustcEncodable, RustcDecodable)]
 pub struct CrateRoot {
index 14f277d1767bbc3faf3915ddec606226c46d6327..eaba573dcd2e582d21dfdc056e87ba601d820e94 100644 (file)
@@ -324,7 +324,9 @@ fn write_mir_sig(tcx: TyCtxt, src: MirSource, mir: &Mir, w: &mut Write)
         MirSource::Promoted(_, i) => write!(w, "{:?} in", i)?
     }
 
-    write!(w, " {}", tcx.node_path_str(src.item_id()))?;
+    item_path::with_forced_impl_filename_line(|| { // see notes on #41697 elsewhere
+        write!(w, " {}", tcx.node_path_str(src.item_id()))
+    })?;
 
     if let MirSource::Fn(_) = src {
         write!(w, "(")?;
index cdde56f5f634be75a2d0917a5a1885bf59606e2d..3027489d65be2ea1f955e5e3c5e1c8e611b66280 100644 (file)
@@ -103,7 +103,8 @@ pub fn register_syntax_extension(&mut self, name: ast::Name, extension: SyntaxEx
         }
         self.syntax_exts.push((name, match extension {
             NormalTT(ext, _, allow_internal_unstable) => {
-                NormalTT(ext, Some(self.krate_span), allow_internal_unstable)
+                let nid = ast::CRATE_NODE_ID;
+                NormalTT(ext, Some((nid, self.krate_span)), allow_internal_unstable)
             }
             IdentTT(ext, _, allow_internal_unstable) => {
                 IdentTT(ext, Some(self.krate_span), allow_internal_unstable)
index 774e84de36638d84ba4abd4a7773081b1360734a..6ea666e21dcdd28fd24ab0bcd4af1e893269d753 100644 (file)
@@ -1187,6 +1187,10 @@ pub struct Resolver<'a> {
     pub whitelisted_legacy_custom_derives: Vec<Name>,
     pub found_unresolved_macro: bool,
 
+    // List of crate local macros that we need to warn about as being unused.
+    // Right now this only includes macro_rules! macros.
+    unused_macros: FxHashSet<DefId>,
+
     // Maps the `Mark` of an expansion to its containing module or block.
     invocations: FxHashMap<Mark, &'a InvocationData<'a>>,
 
@@ -1392,6 +1396,7 @@ pub fn new(session: &'a Session,
             potentially_unused_imports: Vec::new(),
             struct_constructors: DefIdMap(),
             found_unresolved_macro: false,
+            unused_macros: FxHashSet(),
         }
     }
 
index c08421cb9374eb9efa04bf4183762a66c07287cc..231d30cd2a22decfdd8729f4f68f46dfa16fe305 100644 (file)
@@ -16,7 +16,7 @@
 use rustc::hir::def_id::{DefId, BUILTIN_MACROS_CRATE, CRATE_DEF_INDEX, DefIndex};
 use rustc::hir::def::{Def, Export};
 use rustc::hir::map::{self, DefCollector};
-use rustc::ty;
+use rustc::{ty, lint};
 use syntax::ast::{self, Name, Ident};
 use syntax::attr::{self, HasAttrs};
 use syntax::errors::DiagnosticBuilder;
@@ -291,12 +291,32 @@ fn resolve_invoc(&mut self, invoc: &mut Invocation, scope: Mark, force: bool)
             },
         };
         self.macro_defs.insert(invoc.expansion_data.mark, def.def_id());
+        self.unused_macros.remove(&def.def_id());
         Ok(Some(self.get_macro(def)))
     }
 
     fn resolve_macro(&mut self, scope: Mark, path: &ast::Path, kind: MacroKind, force: bool)
                      -> Result<Rc<SyntaxExtension>, Determinacy> {
-        self.resolve_macro_to_def(scope, path, kind, force).map(|def| self.get_macro(def))
+        self.resolve_macro_to_def(scope, path, kind, force).map(|def| {
+            self.unused_macros.remove(&def.def_id());
+            self.get_macro(def)
+        })
+    }
+
+    fn check_unused_macros(&self) {
+        for did in self.unused_macros.iter() {
+            let id_span = match *self.macro_map[did] {
+                SyntaxExtension::NormalTT(_, isp, _) => isp,
+                _ => None,
+            };
+            if let Some((id, span)) = id_span {
+                let lint = lint::builtin::UNUSED_MACROS;
+                let msg = "unused macro definition".to_string();
+                self.session.add_lint(lint, id, span, msg);
+            } else {
+                bug!("attempted to create unused macro error, but span not available");
+            }
+        }
     }
 }
 
@@ -687,6 +707,8 @@ pub fn define_macro(&mut self, item: &ast::Item, legacy_scope: &mut LegacyScope<
         if attr::contains_name(&item.attrs, "macro_export") {
             let def = Def::Macro(def_id, MacroKind::Bang);
             self.macro_exports.push(Export { name: ident.name, def: def, span: item.span });
+        } else {
+            self.unused_macros.insert(def_id);
         }
     }
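
The resolver changes above implement the new UNUSED_MACROS lint as set bookkeeping: every non-`#[macro_export]` `macro_rules!` definition is recorded, each resolution removes its entry, and whatever remains at the end is reported. A simplified, self-contained sketch of that bookkeeping, with `u32` ids standing in for `DefId`:

```rust
use std::collections::HashSet;

struct MacroTracker {
    unused: HashSet<u32>,
}

impl MacroTracker {
    fn new() -> Self { MacroTracker { unused: HashSet::new() } }
    // define_macro: exported macros are never reported.
    fn define(&mut self, id: u32, exported: bool) {
        if !exported { self.unused.insert(id); }
    }
    // resolve_macro / resolve_invoc: a use clears the entry.
    fn resolve(&mut self, id: u32) { self.unused.remove(&id); }
    // check_unused_macros: whatever is left gets the lint.
    fn remaining(&self) -> Vec<u32> { self.unused.iter().cloned().collect() }
}

fn main() {
    let mut t = MacroTracker::new();
    t.define(1, false);
    t.define(2, false);
    t.resolve(1);
    assert_eq!(t.remaining(), vec![2]);
}
```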
 
index af477f5a15217bf7ca4c132a5dba9f77a9cb17d1..4ccc85257f3c990ba313df23fa87a9de43476721 100644 (file)
@@ -12,6 +12,7 @@ test = false
 [dependencies]
 flate = { path = "../libflate" }
 log = "0.3"
+owning_ref = "0.3.3"
 rustc = { path = "../librustc" }
 rustc_back = { path = "../librustc_back" }
 rustc_bitflags = { path = "../librustc_bitflags" }
index 0f908b7d0698bc48e7bd1fd529d46212c5433a0f..902065c8688d0d808fb6c2c29d25c82d8bbd219a 100644 (file)
@@ -20,6 +20,7 @@
 use libc;
 use llvm::archive_ro::{ArchiveRO, Child};
 use llvm::{self, ArchiveKind};
+use metadata::METADATA_FILENAME;
 use rustc::session::Session;
 
 pub struct ArchiveConfig<'a> {
@@ -158,11 +159,9 @@ pub fn add_rlib(&mut self,
         // Ignoring all bytecode files, no matter of
         // name
         let bc_ext = ".bytecode.deflate";
-        let metadata_filename =
-            self.config.sess.cstore.metadata_filename().to_owned();
 
         self.add_archive(rlib, move |fname: &str| {
-            if fname.ends_with(bc_ext) || fname == metadata_filename {
+            if fname.ends_with(bc_ext) || fname == METADATA_FILENAME {
                 return true
             }
 
index e42e69d2a76e27979718317626a86ee2a51df1de..f85d3f9f54dfd7cd8b63ad152089bfa10e10eca0 100644 (file)
@@ -13,6 +13,7 @@
 use super::rpath::RPathConfig;
 use super::rpath;
 use super::msvc;
+use metadata::METADATA_FILENAME;
 use session::config;
 use session::config::NoDebugInfo;
 use session::config::{OutputFilenames, Input, OutputType};
@@ -521,7 +522,7 @@ fn link_rlib<'a>(sess: &'a Session,
             // contain the metadata in a separate file. We use a temp directory
             // here so concurrent builds in the same directory don't try to use
             // the same filename for metadata (stomping over one another)
-            let metadata = tmpdir.join(sess.cstore.metadata_filename());
+            let metadata = tmpdir.join(METADATA_FILENAME);
             emit_metadata(sess, trans, &metadata);
             ab.add_file(&metadata);
 
@@ -714,6 +715,10 @@ fn link_natively(sess: &Session,
     if let Some(args) = sess.target.target.options.pre_link_args.get(&flavor) {
         cmd.args(args);
     }
+    if let Some(ref args) = sess.opts.debugging_opts.pre_link_args {
+        cmd.args(args);
+    }
+    cmd.args(&sess.opts.debugging_opts.pre_link_arg);
 
     let pre_link_objects = if crate_type == config::CrateTypeExecutable {
         &sess.target.target.options.pre_link_objects_exe
@@ -1141,8 +1146,7 @@ fn link_sanitizer_runtime(cmd: &mut Linker,
         archive.update_symbols();
 
         for f in archive.src_files() {
-            if f.ends_with("bytecode.deflate") ||
-                f == sess.cstore.metadata_filename() {
+            if f.ends_with("bytecode.deflate") || f == METADATA_FILENAME {
                     archive.remove_file(&f);
                     continue
                 }
@@ -1217,8 +1221,7 @@ fn add_static_crate(cmd: &mut Linker,
 
             let mut any_objects = false;
             for f in archive.src_files() {
-                if f.ends_with("bytecode.deflate") ||
-                   f == sess.cstore.metadata_filename() {
+                if f.ends_with("bytecode.deflate") || f == METADATA_FILENAME {
                     archive.remove_file(&f);
                     continue
                 }
index 8689e176f7a7b92d8d5d70f1443ea10361acf804..437ced85b2e4ad48ec97d7186ea54f5d50820764 100644 (file)
@@ -34,6 +34,7 @@
 use back::symbol_export::{self, ExportedSymbols};
 use llvm::{ContextRef, Linkage, ModuleRef, ValueRef, Vector, get_param};
 use llvm;
+use metadata;
 use rustc::hir::def_id::LOCAL_CRATE;
 use middle::lang_items::StartFnLangItem;
 use middle::cstore::EncodedMetadata;
@@ -778,8 +779,7 @@ enum MetadataKind {
     };
     unsafe {
         llvm::LLVMSetInitializer(llglobal, llconst);
-        let section_name =
-            tcx.sess.cstore.metadata_section_name(&tcx.sess.target.target);
+        let section_name = metadata::metadata_section_name(&tcx.sess.target.target);
         let name = CString::new(section_name).unwrap();
         llvm::LLVMSetSection(llglobal, name.as_ptr());
 
index 8e633ee59b67d7696323d460c5d553802927f23c..3ac0d88b90d7b7be0f6894313951a6b6595cf235 100644 (file)
@@ -45,6 +45,7 @@
 
 extern crate flate;
 extern crate libc;
+extern crate owning_ref;
 #[macro_use] extern crate rustc;
 extern crate rustc_back;
 extern crate rustc_data_structures;
@@ -70,6 +71,9 @@
 pub use base::trans_crate;
 pub use back::symbol_names::provide;
 
+pub use metadata::LlvmMetadataLoader;
+pub use llvm_util::{init, target_features, print_version, print_passes, print, enable_llvm_debug};
+
 pub mod back {
     pub use rustc::hir::svh;
 
@@ -119,7 +123,9 @@ pub mod back {
 mod declare;
 mod glue;
 mod intrinsic;
+mod llvm_util;
 mod machine;
+mod metadata;
 mod meth;
 mod mir;
 mod monomorphize;
diff --git a/src/librustc_trans/llvm_util.rs b/src/librustc_trans/llvm_util.rs
new file mode 100644 (file)
index 0000000..15f5603
--- /dev/null
@@ -0,0 +1,127 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use syntax_pos::symbol::Symbol;
+use back::write::create_target_machine;
+use llvm;
+use rustc::session::Session;
+use rustc::session::config::PrintRequest;
+use libc::{c_int, c_char};
+use std::ffi::CString;
+
+use std::sync::atomic::{AtomicBool, Ordering};
+use std::sync::Once;
+
+pub fn init(sess: &Session) {
+    unsafe {
+        // Before we touch LLVM, make sure that multithreading is enabled.
+        static POISONED: AtomicBool = AtomicBool::new(false);
+        static INIT: Once = Once::new();
+        INIT.call_once(|| {
+            if llvm::LLVMStartMultithreaded() != 1 {
+                // use an extra bool to make sure that all future usage of LLVM
+                // cannot proceed despite the Once not running more than once.
+                POISONED.store(true, Ordering::SeqCst);
+            }
+
+            configure_llvm(sess);
+        });
+
+        if POISONED.load(Ordering::SeqCst) {
+            bug!("couldn't enable multi-threaded LLVM");
+        }
+    }
+}
+
+unsafe fn configure_llvm(sess: &Session) {
+    let mut llvm_c_strs = Vec::new();
+    let mut llvm_args = Vec::new();
+
+    {
+        let mut add = |arg: &str| {
+            let s = CString::new(arg).unwrap();
+            llvm_args.push(s.as_ptr());
+            llvm_c_strs.push(s);
+        };
+        add("rustc"); // fake program name
+        if sess.time_llvm_passes() { add("-time-passes"); }
+        if sess.print_llvm_passes() { add("-debug-pass=Structure"); }
+
+        for arg in &sess.opts.cg.llvm_args {
+            add(&(*arg));
+        }
+    }
+
+    llvm::LLVMInitializePasses();
+
+    llvm::initialize_available_targets();
+
+    llvm::LLVMRustSetLLVMOptions(llvm_args.len() as c_int,
+                                 llvm_args.as_ptr());
+}
+
+// WARNING: the features must be known to LLVM or the feature
+// detection code will walk past the end of the feature array,
+// leading to crashes.
+
+const ARM_WHITELIST: &'static [&'static str] = &["neon\0", "vfp2\0", "vfp3\0", "vfp4\0"];
+
+const X86_WHITELIST: &'static [&'static str] = &["avx\0", "avx2\0", "bmi\0", "bmi2\0", "sse\0",
+                                                 "sse2\0", "sse3\0", "sse4.1\0", "sse4.2\0",
+                                                 "ssse3\0", "tbm\0", "lzcnt\0", "popcnt\0",
+                                                 "sse4a\0", "rdrnd\0", "rdseed\0", "fma\0"];
+
+const HEXAGON_WHITELIST: &'static [&'static str] = &["hvx\0", "hvx-double\0"];
+
+pub fn target_features(sess: &Session) -> Vec<Symbol> {
+    let target_machine = create_target_machine(sess);
+
+    let whitelist = match &*sess.target.target.arch {
+        "arm" => ARM_WHITELIST,
+        "x86" | "x86_64" => X86_WHITELIST,
+        "hexagon" => HEXAGON_WHITELIST,
+        _ => &[],
+    };
+
+    let mut features = Vec::new();
+    for feat in whitelist {
+        assert_eq!(feat.chars().last(), Some('\0'));
+        if unsafe { llvm::LLVMRustHasFeature(target_machine, feat.as_ptr() as *const c_char) } {
+            features.push(Symbol::intern(&feat[..feat.len() - 1]));
+        }
+    }
+    features
+}
+
+pub fn print_version() {
+    unsafe {
+        println!("LLVM version: {}.{}",
+                 llvm::LLVMRustVersionMajor(), llvm::LLVMRustVersionMinor());
+    }
+}
+
+pub fn print_passes() {
+    unsafe { llvm::LLVMRustPrintPasses(); }
+}
+
+pub fn print(req: PrintRequest, sess: &Session) {
+    let tm = create_target_machine(sess);
+    unsafe {
+        match req {
+            PrintRequest::TargetCPUs => llvm::LLVMRustPrintTargetCPUs(tm),
+            PrintRequest::TargetFeatures => llvm::LLVMRustPrintTargetFeatures(tm),
+            _ => bug!("rustc_trans can't handle print request: {:?}", req),
+        }
+    }
+}
+
+pub fn enable_llvm_debug() {
+    unsafe { llvm::LLVMRustSetDebug(1); }
+}
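
`init()` above combines `std::sync::Once` with an atomic poison flag so that LLVM is configured exactly once, and every later caller still observes whether that one-time setup failed. A standalone sketch of the same idiom outside of rustc:

```rust
use std::sync::Once;
use std::sync::atomic::{AtomicBool, Ordering};

static POISONED: AtomicBool = AtomicBool::new(false);
static INIT: Once = Once::new();

fn init_once(setup_succeeded: bool) {
    INIT.call_once(|| {
        // Only the first caller runs this; record failure for everyone else.
        if !setup_succeeded {
            POISONED.store(true, Ordering::SeqCst);
        }
    });
    // Every caller, first or not, checks the flag afterwards.
    if POISONED.load(Ordering::SeqCst) {
        panic!("one-time initialisation failed");
    }
}

fn main() {
    init_once(true);
    init_once(false); // the closure no longer runs, so this still succeeds
}
```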
diff --git a/src/librustc_trans/metadata.rs b/src/librustc_trans/metadata.rs
new file mode 100644 (file)
index 0000000..2c0148d
--- /dev/null
@@ -0,0 +1,122 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use rustc::util::common;
+use rustc::middle::cstore::MetadataLoader;
+use rustc_back::target::Target;
+use llvm;
+use llvm::{False, ObjectFile, mk_section_iter};
+use llvm::archive_ro::ArchiveRO;
+
+use owning_ref::{ErasedBoxRef, OwningRef};
+use std::path::Path;
+use std::ptr;
+use std::slice;
+
+pub const METADATA_FILENAME: &str = "rust.metadata.bin";
+
+pub struct LlvmMetadataLoader;
+
+impl MetadataLoader for LlvmMetadataLoader {
+    fn get_rlib_metadata(&self, _: &Target, filename: &Path) -> Result<ErasedBoxRef<[u8]>, String> {
+        // Use ArchiveRO for speed here, it's backed by LLVM and uses mmap
+        // internally to read the file. We also avoid even using a memcpy by
+        // just keeping the archive along while the metadata is in use.
+        let archive = ArchiveRO::open(filename)
+            .map(|ar| OwningRef::new(box ar))
+            .ok_or_else(|| {
+                            debug!("llvm didn't like `{}`", filename.display());
+                            format!("failed to read rlib metadata: '{}'", filename.display())
+                        })?;
+        let buf: OwningRef<_, [u8]> = archive
+            .try_map(|ar| {
+                ar.iter()
+                    .filter_map(|s| s.ok())
+                    .find(|sect| sect.name() == Some(METADATA_FILENAME))
+                    .map(|s| s.data())
+                    .ok_or_else(|| {
+                                    debug!("didn't find '{}' in the archive", METADATA_FILENAME);
+                                    format!("failed to read rlib metadata: '{}'",
+                                            filename.display())
+                                })
+            })?;
+        Ok(buf.erase_owner())
+    }
+
+    fn get_dylib_metadata(&self,
+                          target: &Target,
+                          filename: &Path)
+                          -> Result<ErasedBoxRef<[u8]>, String> {
+        unsafe {
+            let buf = common::path2cstr(filename);
+            let mb = llvm::LLVMRustCreateMemoryBufferWithContentsOfFile(buf.as_ptr());
+            if mb as isize == 0 {
+                return Err(format!("error reading library: '{}'", filename.display()));
+            }
+            let of = ObjectFile::new(mb)
+                .map(|of| OwningRef::new(box of))
+                .ok_or_else(|| format!("provided path not an object file: '{}'",
+                                        filename.display()))?;
+            let buf = of.try_map(|of| search_meta_section(of, target, filename))?;
+            Ok(buf.erase_owner())
+        }
+    }
+}
+
+fn search_meta_section<'a>(of: &'a ObjectFile,
+                           target: &Target,
+                           filename: &Path)
+                           -> Result<&'a [u8], String> {
+    unsafe {
+        let si = mk_section_iter(of.llof);
+        while llvm::LLVMIsSectionIteratorAtEnd(of.llof, si.llsi) == False {
+            let mut name_buf = ptr::null();
+            let name_len = llvm::LLVMRustGetSectionName(si.llsi, &mut name_buf);
+            let name = slice::from_raw_parts(name_buf as *const u8, name_len as usize).to_vec();
+            let name = String::from_utf8(name).unwrap();
+            debug!("get_metadata_section: name {}", name);
+            if read_metadata_section_name(target) == name {
+                let cbuf = llvm::LLVMGetSectionContents(si.llsi);
+                let csz = llvm::LLVMGetSectionSize(si.llsi) as usize;
+                // The buffer is valid while the object file is around
+                let buf: &'a [u8] = slice::from_raw_parts(cbuf as *const u8, csz);
+                return Ok(buf);
+            }
+            llvm::LLVMMoveToNextSection(si.llsi);
+        }
+    }
+    Err(format!("metadata not found: '{}'", filename.display()))
+}
+
+pub fn metadata_section_name(target: &Target) -> &'static str {
+    // Historical note:
+    //
+    // When using link.exe it was seen that the section name `.note.rustc`
+    // was getting shortened to `.note.ru`, and according to the PE and COFF
+    // specification:
+    //
+    // > Executable images do not use a string table and do not support
+    // > section names longer than 8 characters
+    //
+    // https://msdn.microsoft.com/en-us/library/windows/hardware/gg463119.aspx
+    //
+    // As a result, we choose a slightly shorter name! As to why
+    // `.note.rustc` works on MinGW, that's another good question...
+
+    if target.options.is_like_osx {
+        "__DATA,.rustc"
+    } else {
+        ".rustc"
+    }
+}
+
+fn read_metadata_section_name(_target: &Target) -> &'static str {
+    ".rustc"
+}
index c7ec379b0de25d9fc1181a959ebe42a3407ebd8e..7e70bb92cd6e0f011b57c3baf236412490b09641 100644 (file)
@@ -251,9 +251,9 @@ macro_rules! report_function {
                     let bound_list = unsatisfied_predicates.iter()
                         .map(|p| format!("`{} : {}`", p.self_ty(), p))
                         .collect::<Vec<_>>()
-                        .join("");
+                        .join("\n");
                     err.note(&format!("the method `{}` exists but the following trait bounds \
-                                       were not satisfied: {}",
+                                       were not satisfied:\n{}",
                                       item_name,
                                       bound_list));
                 }
index 70d2867c08ce624df82e9b7e068b07e7b2476e4d..d304d79bc52c8342b8ce1ad65cf1226f2cfb277a 100644 (file)
@@ -1200,7 +1200,7 @@ fn check_specialization_validity<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                                            impl_id: DefId,
                                            impl_item: &hir::ImplItem)
 {
-    let ancestors = trait_def.ancestors(impl_id);
+    let ancestors = trait_def.ancestors(tcx, impl_id);
 
     let kind = match impl_item.node {
         hir::ImplItemKind::Const(..) => ty::AssociatedKind::Const,
@@ -1330,7 +1330,7 @@ fn check_impl_items_against_trait<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
     let mut invalidated_items = Vec::new();
     let associated_type_overridden = overridden_associated_type.is_some();
     for trait_item in tcx.associated_items(impl_trait_ref.def_id) {
-        let is_implemented = trait_def.ancestors(impl_id)
+        let is_implemented = trait_def.ancestors(tcx, impl_id)
             .defs(tcx, trait_item.name, trait_item.kind)
             .next()
             .map(|node_item| !node_item.node.is_from_trait())
index d40a68e605690d880e34c4bd24a73dafd7004ff1..556bd618c78cbb22e49b90b503f1daad2577d4a6 100644 (file)
@@ -249,6 +249,45 @@ pub fn coerce_unsized_info<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                     return err_info;
                 }
 
+                // Here we are considering a case of converting
+                // `S<P0...Pn>` to `S<Q0...Qn>`. As an example, let's imagine a struct `Foo<T, U>`,
+                // which acts like a pointer to `U`, but carries along some extra data of type `T`:
+                //
+                //     struct Foo<T, U> {
+                //         extra: T,
+                //         ptr: *mut U,
+                //     }
+                //
+                // We might have an impl that allows (e.g.) `Foo<T, [i32; 3]>` to be unsized
+                // to `Foo<T, [i32]>`. That impl would look like:
+                //
+                //   impl<T, U: Unsize<V>, V> CoerceUnsized<Foo<T, V>> for Foo<T, U> {}
+                //
+                // Here `U = [i32; 3]` and `V = [i32]`. At runtime,
+                // when this coercion occurs, we would be changing the
+                // field `ptr` from a thin pointer of type `*mut [i32;
+                // 3]` to a fat pointer of type `*mut [i32]` (with
+                // extra data `3`).  **The purpose of this check is to
+                // make sure that we know how to do this conversion.**
+                //
+                // To check if this impl is legal, we would walk down
+                // the fields of `Foo` and consider their types with
+                // both substitutes. We are looking to find that
+                // exactly one (non-phantom) field has changed its
+                // type, which we will expect to be the pointer that
+                // is becoming fat (we could probably generalize this
+                // to multiple thin pointers of the same type becoming
+                // fat, but we don't). In this case:
+                //
+                // - `extra` has type `T` before and type `T` after
+                // - `ptr` has type `*mut U` before and type `*mut V` after
+                //
+                // Since just one field changed, we would then check
+                // that `*mut U: CoerceUnsized<*mut V>` is implemented
+                // (in other words, that we know how to do this
+                // conversion). This will work out because `U:
+                // Unsize<V>`, and we have a builtin rule that `*mut
+                // U` can be coerced to `*mut V` if `U: Unsize<V>`.
                 let fields = &def_a.struct_variant().fields;
                 let diff_fields = fields.iter()
                     .enumerate()
@@ -260,8 +299,16 @@ pub fn coerce_unsized_info<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                             return None;
                         }
 
-                        // Ignore fields that aren't significantly changed
-                        if let Ok(ok) = infcx.sub_types(false, &cause, b, a) {
+                        // Ignore fields that aren't changed; it may
+                        // be that we could get away with subtyping or
+                        // something more accepting, but we use
+                        // equality because we want to be able to
+                        // perform this check without computing
+                        // variance where possible. (This is because
+                        // we may have to evaluate constraint
+                        // expressions in the course of execution.)
+                        // See e.g. #41936.
+                        if let Ok(ok) = infcx.eq_types(false, &cause, b, a) {
                             if ok.obligations.is_empty() {
                                 return None;
                             }
index 8b9dc20315d25fff85ee0a9bf6a2b51cb6907a59..165be49f7603de1d73ff00a03519187585980996 100644 (file)
@@ -46,8 +46,6 @@ fn check_impl<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, node_id: ast::NodeId) {
         }
 
         enforce_trait_manually_implementable(tcx, impl_def_id, trait_ref.def_id);
-        let trait_def = tcx.trait_def(trait_ref.def_id);
-        trait_def.record_local_impl(tcx, impl_def_id, trait_ref);
     }
 }
 
@@ -117,8 +115,6 @@ pub fn provide(providers: &mut Providers) {
 
 fn coherent_trait<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                             (_, def_id): (CrateNum, DefId)) {
-    tcx.populate_implementations_for_trait_if_necessary(def_id);
-
     let impls = tcx.hir.trait_impls(def_id);
     for &impl_id in impls {
         check_impl(tcx, impl_id);
index f479dc2e6ab617a60a52a60f8bf091d6b05b0834..ba1d7b18e8c7faeb29dcc40e41f84cf13426d211 100644 (file)
@@ -41,39 +41,10 @@ pub fn check_impl<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, node_id: ast::NodeId) {
     let _task =
         tcx.dep_graph.in_task(DepNode::CoherenceOverlapCheck(trait_def_id));
 
-    let def = tcx.trait_def(trait_def_id);
-
-    // attempt to insert into the specialization graph
-    let insert_result = def.add_impl_for_specialization(tcx, impl_def_id);
-
-    // insertion failed due to overlap
-    if let Err(overlap) = insert_result {
-        let mut err = struct_span_err!(tcx.sess,
-                                       tcx.span_of_impl(impl_def_id).unwrap(),
-                                       E0119,
-                                       "conflicting implementations of trait `{}`{}:",
-                                       overlap.trait_desc,
-                                       overlap.self_desc.clone().map_or(String::new(),
-                                                                        |ty| {
-            format!(" for type `{}`", ty)
-        }));
-
-        match tcx.span_of_impl(overlap.with_impl) {
-            Ok(span) => {
-                err.span_label(span, "first implementation here");
-                err.span_label(tcx.span_of_impl(impl_def_id).unwrap(),
-                               format!("conflicting implementation{}",
-                                        overlap.self_desc
-                                            .map_or(String::new(),
-                                                    |ty| format!(" for `{}`", ty))));
-            }
-            Err(cname) => {
-                err.note(&format!("conflicting implementation in crate `{}`", cname));
-            }
-        }
+    // Trigger building the specialization graph for the trait of this impl.
+    // This will detect any overlap errors.
+    tcx.specialization_graph_of(trait_def_id);
 
-        err.emit();
-    }
 
     // check for overlap with the automatic `impl Trait for Trait`
     if let ty::TyDynamic(ref data, ..) = trait_ref.self_ty().sty {
index 7c6c70024ce9e2453131fa7c4856e16e33e6fd09..cb1bd3e099d54c31ef6b468cf0cc619a1cb69e4a 100644 (file)
@@ -749,12 +749,12 @@ fn trait_def<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
     }
 
     let def_path_hash = tcx.def_path_hash(def_id);
-    let def = ty::TraitDef::new(def_id, unsafety, paren_sugar, def_path_hash);
-
-    if tcx.hir.trait_is_auto(def_id) {
-        def.record_has_default_impl();
-    }
-
+    let has_default_impl = tcx.hir.trait_is_auto(def_id);
+    let def = ty::TraitDef::new(def_id,
+                                unsafety,
+                                paren_sugar,
+                                has_default_impl,
+                                def_path_hash);
     tcx.alloc_trait_def(def)
 }
 
index 0f42ee15ecf6fea1110144fa55135cd10dc15f78..f9ebe3fff5beb951478ecbe9f8442cc48b8cf679 100644 (file)
@@ -1524,67 +1524,6 @@ fn get_state(&self) -> String {
 ```
 "##,
 
-E0119: r##"
-There are conflicting trait implementations for the same type.
-Example of erroneous code:
-
-```compile_fail,E0119
-trait MyTrait {
-    fn get(&self) -> usize;
-}
-
-impl<T> MyTrait for T {
-    fn get(&self) -> usize { 0 }
-}
-
-struct Foo {
-    value: usize
-}
-
-impl MyTrait for Foo { // error: conflicting implementations of trait
-                       //        `MyTrait` for type `Foo`
-    fn get(&self) -> usize { self.value }
-}
-```
-
-When looking for the implementation for the trait, the compiler finds
-both the `impl<T> MyTrait for T` where T is all types and the `impl
-MyTrait for Foo`. Since a trait cannot be implemented multiple times,
-this is an error. So, when you write:
-
-```
-trait MyTrait {
-    fn get(&self) -> usize;
-}
-
-impl<T> MyTrait for T {
-    fn get(&self) -> usize { 0 }
-}
-```
-
-This makes the trait implemented on all types in the scope. So if you
-try to implement it on another one after that, the implementations will
-conflict. Example:
-
-```
-trait MyTrait {
-    fn get(&self) -> usize;
-}
-
-impl<T> MyTrait for T {
-    fn get(&self) -> usize { 0 }
-}
-
-struct Foo;
-
-fn main() {
-    let f = Foo;
-
-    f.get(); // the trait is implemented so we can use it
-}
-```
-"##,
-
 E0120: r##"
 An attempt was made to implement Drop on a trait, which is not allowed: only
 structs and enums can implement Drop. An example causing this error:
index 9e2d85163335c6f066a493070c8b53aa968e2ef0..9a689ed079ee259f8240a652942966c3ddd7ec57 100644 (file)
@@ -19,6 +19,7 @@
 use rustc::hir::map as hir_map;
 use rustc::lint;
 use rustc::util::nodemap::FxHashMap;
+use rustc_trans;
 use rustc_trans::back::link;
 use rustc_resolve as resolve;
 use rustc_metadata::cstore::CStore;
@@ -138,10 +139,11 @@ pub fn run_core(search_paths: SearchPaths,
 
     let dep_graph = DepGraph::new(false);
     let _ignore = dep_graph.in_ignore();
-    let cstore = Rc::new(CStore::new(&dep_graph));
+    let cstore = Rc::new(CStore::new(&dep_graph, box rustc_trans::LlvmMetadataLoader));
     let mut sess = session::build_session_(
         sessopts, &dep_graph, cpath, diagnostic_handler, codemap, cstore.clone()
     );
+    rustc_trans::init(&sess);
     rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess));
 
     let mut cfg = config::build_configuration(&sess, config::parse_cfgspecs(cfgs));
index 5db82e23bbf1eb0244d6500e8c0533f21e6f0105..612793e2567f1c35dbf5bd47684578559e0d8fde 100644 (file)
@@ -1177,7 +1177,6 @@ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         let quot = if f.alternate() { "\"" } else { "&quot;" };
         match self.0 {
             Abi::Rust => Ok(()),
-            Abi::C => write!(f, "extern "),
             abi => write!(f, "extern {0}{1}{0} ", quot, abi.name()),
         }
     }
index 60ea6ed1d8d58753f6f5643b1590bde5f4c4372f..570a1980782131131ff3c05dbe52e6ad0fa621a1 100644 (file)
@@ -617,6 +617,11 @@ a.test-arrow:hover{
        top: 0;
 }
 
+h3 > .collapse-toggle, h4 > .collapse-toggle {
+       font-size: 0.8em;
+       top: 5px;
+}
+
 .toggle-wrapper > .collapse-toggle {
        left: -24px;
        margin-top: 0px;
index d5237d629cfc1e7f8f40f2a9c3c9b53c6bdb0825..cfe2fad0fa4695a59d1b840da41c493305086a6c 100644 (file)
@@ -34,6 +34,7 @@
 use rustc_driver::driver::phase_2_configure_and_expand;
 use rustc_metadata::cstore::CStore;
 use rustc_resolve::MakeGlobMap;
+use rustc_trans;
 use rustc_trans::back::link;
 use syntax::ast;
 use syntax::codemap::CodeMap;
@@ -81,10 +82,11 @@ pub fn run(input: &str,
 
     let dep_graph = DepGraph::new(false);
     let _ignore = dep_graph.in_ignore();
-    let cstore = Rc::new(CStore::new(&dep_graph));
+    let cstore = Rc::new(CStore::new(&dep_graph, box rustc_trans::LlvmMetadataLoader));
     let mut sess = session::build_session_(
         sessopts, &dep_graph, Some(input_path.clone()), handler, codemap.clone(), cstore.clone(),
     );
+    rustc_trans::init(&sess);
     rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess));
     sess.parse_sess.config =
         config::build_configuration(&sess, config::parse_cfgspecs(cfgs.clone()));
@@ -229,10 +231,11 @@ fn drop(&mut self) {
     let diagnostic_handler = errors::Handler::with_emitter(true, false, box emitter);
 
     let dep_graph = DepGraph::new(false);
-    let cstore = Rc::new(CStore::new(&dep_graph));
+    let cstore = Rc::new(CStore::new(&dep_graph, box rustc_trans::LlvmMetadataLoader));
     let mut sess = session::build_session_(
         sessopts, &dep_graph, None, diagnostic_handler, codemap, cstore.clone(),
     );
+    rustc_trans::init(&sess);
     rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess));
 
     let outdir = Mutex::new(TempDir::new("rustdoctest").ok().expect("rustdoc needs a tempdir"));
index 3896fc20a2dde12924ac2c477b6013f0459d2a65..d0e7defbbbb9a15f0b9af79925d319c8d8e22d59 100644 (file)
@@ -754,6 +754,13 @@ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
 }
 
 /// Describes the result of a process after it has terminated.
+///
+/// This `struct` is used to represent the exit status of a child process.
+/// Child processes are created via the [`Command`] struct and their exit
+/// status is exposed through the [`status`] method.
+///
+/// [`Command`]: struct.Command.html
+/// [`status`]: struct.Command.html#method.status
 #[derive(PartialEq, Eq, Clone, Copy, Debug)]
 #[stable(feature = "process", since = "1.0.0")]
 pub struct ExitStatus(imp::ExitStatus);
@@ -788,6 +795,22 @@ pub fn success(&self) -> bool {
     /// On Unix, this will return `None` if the process was terminated
     /// by a signal; `std::os::unix` provides an extension trait for
     /// extracting the signal and other details from the `ExitStatus`.
+    ///
+    /// # Examples
+    ///
+    /// ```no_run
+    /// use std::process::Command;
+    ///
+    /// let status = Command::new("mkdir")
+    ///                      .arg("projects")
+    ///                      .status()
+    ///                      .expect("failed to execute mkdir");
+    ///
+    /// match status.code() {
+    ///     Some(code) => println!("Exited with status code: {}", code),
+    ///     None       => println!("Process terminated by signal")
+    /// }
+    /// ```
     #[stable(feature = "process", since = "1.0.0")]
     pub fn code(&self) -> Option<i32> {
         self.0.code()
index 253787546c1dc75c3d7340a107b52777eacd4263..3f6c2827a3f937242b2522ccf29904545e6b39d8 100644 (file)
@@ -26,8 +26,22 @@ pub trait OsStringExt {
     /// Creates an `OsString` from a potentially ill-formed UTF-16 slice of
     /// 16-bit code units.
     ///
-    /// This is lossless: calling `.encode_wide()` on the resulting string
+    /// This is lossless: calling [`encode_wide`] on the resulting string
     /// will always return the original code units.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::ffi::OsString;
+    /// use std::os::windows::prelude::*;
+    ///
+    /// // UTF-16 encoding for "Unicode".
+    /// let source = [0x0055, 0x006E, 0x0069, 0x0063, 0x006F, 0x0064, 0x0065];
+    ///
+    /// let string = OsString::from_wide(&source[..]);
+    /// ```
+    ///
+    /// [`encode_wide`]: ./trait.OsStrExt.html#tymethod.encode_wide
     #[stable(feature = "rust1", since = "1.0.0")]
     fn from_wide(wide: &[u16]) -> Self;
 }
@@ -42,11 +56,29 @@ fn from_wide(wide: &[u16]) -> OsString {
 /// Windows-specific extensions to `OsStr`.
 #[stable(feature = "rust1", since = "1.0.0")]
 pub trait OsStrExt {
-    /// Re-encodes an `OsStr` as a wide character sequence,
-    /// i.e. potentially ill-formed UTF-16.
+    /// Re-encodes an `OsStr` as a wide character sequence, i.e. potentially
+    /// ill-formed UTF-16.
+    ///
+    /// This is lossless: calling [`OsString::from_wide`] and then
+    /// `encode_wide` on the result will yield the original code units.
+    /// Note that the encoding does not add a final null terminator.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::ffi::OsString;
+    /// use std::os::windows::prelude::*;
+    ///
+    /// // UTF-16 encoding for "Unicode".
+    /// let source = [0x0055, 0x006E, 0x0069, 0x0063, 0x006F, 0x0064, 0x0065];
+    ///
+    /// let string = OsString::from_wide(&source[..]);
+    ///
+    /// let result: Vec<u16> = string.encode_wide().collect();
+    /// assert_eq!(&source[..], &result[..]);
+    /// ```
     ///
-    /// This is lossless. Note that the encoding does not include a final
-    /// null.
+    /// [`OsString::from_wide`]: ./trait.OsStringExt.html#tymethod.from_wide
     #[stable(feature = "rust1", since = "1.0.0")]
     fn encode_wide(&self) -> EncodeWide;
 }
index d6e2fed56be96b1d5b640dacc05aecc73fe2295f..2d00cb38ec4fcb9ba20d85f642c60c1c6140f22d 100644 (file)
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-//! Windows-specific extensions for the primitives in `std::fs`
+//! Windows-specific extensions for the primitives in the `std::fs` module.
 
 #![stable(feature = "rust1", since = "1.0.0")]
 
@@ -18,7 +18,9 @@
 use sys;
 use sys_common::{AsInnerMut, AsInner};
 
-/// Windows-specific extensions to `File`
+/// Windows-specific extensions to [`File`].
+///
+/// [`File`]: ../../../fs/struct.File.html
 #[stable(feature = "file_offset", since = "1.15.0")]
 pub trait FileExt {
     /// Seeks to a given position and reads a number of bytes.
@@ -35,6 +37,24 @@ pub trait FileExt {
     /// Note that similar to `File::read`, it is not an error to return with a
     /// short read. When returning from such a short read, the file pointer is
     /// still updated.
+    ///
+    /// # Examples
+    ///
+    /// ```no_run
+    /// use std::io;
+    /// use std::fs::File;
+    /// use std::os::windows::prelude::*;
+    ///
+    /// # fn foo() -> io::Result<()> {
+    /// let mut file = File::open("foo.txt")?;
+    /// let mut buffer = [0; 10];
+    ///
+    /// // Read 10 bytes, starting 72 bytes from the
+    /// // start of the file.
+    /// file.seek_read(&mut buffer[..], 72)?;
+    /// # Ok(())
+    /// # }
+    /// ```
     #[stable(feature = "file_offset", since = "1.15.0")]
     fn seek_read(&self, buf: &mut [u8], offset: u64) -> io::Result<usize>;
 
@@ -52,6 +72,22 @@ pub trait FileExt {
     /// Note that similar to `File::write`, it is not an error to return a
     /// short write. When returning from such a short write, the file pointer
     /// is still updated.
+    ///
+    /// # Examples
+    ///
+    /// ```no_run
+    /// use std::fs::File;
+    /// use std::os::windows::prelude::*;
+    ///
+    /// # fn foo() -> std::io::Result<()> {
+    /// let mut buffer = File::create("foo.txt")?;
+    ///
+    /// // Write a byte string starting 72 bytes from
+    /// // the start of the file.
+    /// buffer.seek_write(b"some bytes", 72)?;
+    /// # Ok(())
+    /// # }
+    /// ```
     #[stable(feature = "file_offset", since = "1.15.0")]
     fn seek_write(&self, buf: &[u8], offset: u64) -> io::Result<usize>;
 }
@@ -67,81 +103,94 @@ fn seek_write(&self, buf: &[u8], offset: u64) -> io::Result<usize> {
     }
 }
 
-/// Windows-specific extensions to `OpenOptions`
+/// Windows-specific extensions to [`OpenOptions`].
+///
+/// [`OpenOptions`]: ../../../fs/struct.OpenOptions.html
 #[stable(feature = "open_options_ext", since = "1.10.0")]
 pub trait OpenOptionsExt {
-    /// Overrides the `dwDesiredAccess` argument to the call to `CreateFile`
+    /// Overrides the `dwDesiredAccess` argument to the call to [`CreateFile`]
     /// with the specified value.
     ///
     /// This will override the `read`, `write`, and `append` flags on the
     /// `OpenOptions` structure. This method provides fine-grained control over
     /// the permissions to read, write and append data, attributes (like hidden
-    /// and system) and extended attributes.
+    /// and system), and extended attributes.
     ///
     /// # Examples
     ///
     /// ```no_run
     /// use std::fs::OpenOptions;
-    /// use std::os::windows::fs::OpenOptionsExt;
+    /// use std::os::windows::prelude::*;
     ///
     /// // Open without read and write permission, for example if you only need
-    /// // to call `stat()` on the file
+    /// // to call `stat` on the file
     /// let file = OpenOptions::new().access_mode(0).open("foo.txt");
     /// ```
+    ///
+    /// [`CreateFile`]: https://msdn.microsoft.com/en-us/library/windows/desktop/aa363858.aspx
     #[stable(feature = "open_options_ext", since = "1.10.0")]
     fn access_mode(&mut self, access: u32) -> &mut Self;
 
-    /// Overrides the `dwShareMode` argument to the call to `CreateFile` with
+    /// Overrides the `dwShareMode` argument to the call to [`CreateFile`] with
     /// the specified value.
     ///
     /// By default `share_mode` is set to
-    /// `FILE_SHARE_READ | FILE_SHARE_WRITE | FILE_SHARE_DELETE`. Specifying
-    /// less permissions denies others to read from, write to and/or delete the
-    /// file while it is open.
+    /// `FILE_SHARE_READ | FILE_SHARE_WRITE | FILE_SHARE_DELETE`. This allows
+    /// other processes to read, write, and delete/rename the same file
+    /// while it is open. Removing any of the flags will prevent other
+    /// processes from performing the corresponding operation until the file
+    /// handle is closed.
     ///
     /// # Examples
     ///
     /// ```no_run
     /// use std::fs::OpenOptions;
-    /// use std::os::windows::fs::OpenOptionsExt;
+    /// use std::os::windows::prelude::*;
     ///
     /// // Do not allow others to read or modify this file while we have it open
-    /// // for writing
-    /// let file = OpenOptions::new().write(true)
-    ///                              .share_mode(0)
-    ///                              .open("foo.txt");
+    /// // for writing.
+    /// let file = OpenOptions::new()
+    ///     .write(true)
+    ///     .share_mode(0)
+    ///     .open("foo.txt");
     /// ```
+    ///
+    /// [`CreateFile`]: https://msdn.microsoft.com/en-us/library/windows/desktop/aa363858.aspx
     #[stable(feature = "open_options_ext", since = "1.10.0")]
     fn share_mode(&mut self, val: u32) -> &mut Self;
 
     /// Sets extra flags for the `dwFileFlags` argument to the call to
-    /// `CreateFile2` (or combines it with `attributes` and `security_qos_flags`
-    /// to set the `dwFlagsAndAttributes` for `CreateFile`).
+    /// [`CreateFile2`] to the specified value (or combines it with
+    /// `attributes` and `security_qos_flags` to set the `dwFlagsAndAttributes`
+    /// for [`CreateFile`]).
     ///
-    /// Custom flags can only set flags, not remove flags set by Rusts options.
-    /// This options overwrites any previously set custom flags.
+    /// Custom flags can only set flags, not remove flags set by Rust's options.
+    /// This option overwrites any previously set custom flags.
     ///
     /// # Examples
     ///
-    /// ```rust,ignore
+    /// ```ignore
     /// extern crate winapi;
+    ///
     /// use std::fs::OpenOptions;
-    /// use std::os::windows::fs::OpenOptionsExt;
-    ///
-    /// let mut options = OpenOptions::new();
-    /// options.create(true).write(true);
-    /// if cfg!(windows) {
-    ///     options.custom_flags(winapi::FILE_FLAG_DELETE_ON_CLOSE);
-    /// }
-    /// let file = options.open("foo.txt");
+    /// use std::os::windows::prelude::*;
+    ///
+    /// let file = OpenOptions::new()
+    ///     .create(true)
+    ///     .write(true)
+    ///     .custom_flags(winapi::FILE_FLAG_DELETE_ON_CLOSE)
+    ///     .open("foo.txt");
     /// ```
+    ///
+    /// [`CreateFile`]: https://msdn.microsoft.com/en-us/library/windows/desktop/aa363858.aspx
+    /// [`CreateFile2`]: https://msdn.microsoft.com/en-us/library/windows/desktop/hh449422.aspx
     #[stable(feature = "open_options_ext", since = "1.10.0")]
     fn custom_flags(&mut self, flags: u32) -> &mut Self;
 
-    /// Sets the `dwFileAttributes` argument to the call to `CreateFile2` to
+    /// Sets the `dwFileAttributes` argument to the call to [`CreateFile2`] to
     /// the specified value (or combines it with `custom_flags` and
     /// `security_qos_flags` to set the `dwFlagsAndAttributes` for
-    /// `CreateFile`).
+    /// [`CreateFile`]).
     ///
     /// If a _new_ file is created because it does not yet exist and
     /// `.create(true)` or `.create_new(true)` are specified, the new file is
@@ -155,21 +204,52 @@ pub trait OpenOptionsExt {
     ///
     /// # Examples
     ///
-    /// ```rust,ignore
+    /// ```ignore
     /// extern crate winapi;
+    ///
     /// use std::fs::OpenOptions;
-    /// use std::os::windows::fs::OpenOptionsExt;
+    /// use std::os::windows::prelude::*;
     ///
-    /// let file = OpenOptions::new().write(true).create(true)
-    ///                              .attributes(winapi::FILE_ATTRIBUTE_HIDDEN)
-    ///                              .open("foo.txt");
+    /// let file = OpenOptions::new()
+    ///     .write(true)
+    ///     .create(true)
+    ///     .attributes(winapi::FILE_ATTRIBUTE_HIDDEN)
+    ///     .open("foo.txt");
     /// ```
+    ///
+    /// [`CreateFile`]: https://msdn.microsoft.com/en-us/library/windows/desktop/aa363858.aspx
+    /// [`CreateFile2`]: https://msdn.microsoft.com/en-us/library/windows/desktop/hh449422.aspx
     #[stable(feature = "open_options_ext", since = "1.10.0")]
     fn attributes(&mut self, val: u32) -> &mut Self;
 
-    /// Sets the `dwSecurityQosFlags` argument to the call to `CreateFile2` to
+    /// Sets the `dwSecurityQosFlags` argument to the call to [`CreateFile2`] to
     /// the specified value (or combines it with `custom_flags` and `attributes`
-    /// to set the `dwFlagsAndAttributes` for `CreateFile`).
+    /// to set the `dwFlagsAndAttributes` for [`CreateFile`]).
+    ///
+    /// By default, `security_qos_flags` is set to `SECURITY_ANONYMOUS`. For
+    /// information about possible values, see [Impersonation Levels] on the
+    /// Windows Dev Center site.
+    ///
+    /// # Examples
+    ///
+    /// ```no_run
+    /// use std::fs::OpenOptions;
+    /// use std::os::windows::prelude::*;
+    ///
+    /// let file = OpenOptions::new()
+    ///     .write(true)
+    ///     .create(true)
+    ///
+    ///     // Sets the flag value to `SecurityIdentification`.
+    ///     .security_qos_flags(1)
+    ///
+    ///     .open("foo.txt");
+    /// ```
+    ///
+    /// [`CreateFile`]: https://msdn.microsoft.com/en-us/library/windows/desktop/aa363858.aspx
+    /// [`CreateFile2`]: https://msdn.microsoft.com/en-us/library/windows/desktop/hh449422.aspx
+    /// [Impersonation Levels]:
+    ///     https://msdn.microsoft.com/en-us/library/windows/desktop/aa379572.aspx
     #[stable(feature = "open_options_ext", since = "1.10.0")]
     fn security_qos_flags(&mut self, flags: u32) -> &mut OpenOptions;
 }
@@ -197,35 +277,136 @@ fn security_qos_flags(&mut self, flags: u32) -> &mut OpenOptions {
     }
 }
 
-/// Extension methods for `fs::Metadata` to access the raw fields contained
+/// Extension methods for [`fs::Metadata`] to access the raw fields contained
 /// within.
+///
+/// The data members that this trait exposes correspond to the members
+/// of the [`BY_HANDLE_FILE_INFORMATION`] structure.
+///
+/// [`fs::Metadata`]: ../../../fs/struct.Metadata.html
+/// [`BY_HANDLE_FILE_INFORMATION`]:
+///     https://msdn.microsoft.com/en-us/library/windows/desktop/aa363788.aspx
 #[stable(feature = "metadata_ext", since = "1.1.0")]
 pub trait MetadataExt {
     /// Returns the value of the `dwFileAttributes` field of this metadata.
     ///
     /// This field contains the file system attribute information for a file
-    /// or directory.
+    /// or directory. For possible values and their descriptions, see
+    /// [File Attribute Constants] in the Windows Dev Center.
+    ///
+    /// # Examples
+    ///
+    /// ```no_run
+    /// use std::io;
+    /// use std::fs;
+    /// use std::os::windows::prelude::*;
+    ///
+    /// # fn foo() -> io::Result<()> {
+    /// let metadata = fs::metadata("foo.txt")?;
+    /// let attributes = metadata.file_attributes();
+    /// # Ok(())
+    /// # }
+    /// ```
+    ///
+    /// [File Attribute Constants]:
+    ///     https://msdn.microsoft.com/en-us/library/windows/desktop/gg258117.aspx
     #[stable(feature = "metadata_ext", since = "1.1.0")]
     fn file_attributes(&self) -> u32;
 
     /// Returns the value of the `ftCreationTime` field of this metadata.
     ///
-    /// The returned 64-bit value represents the number of 100-nanosecond
-    /// intervals since January 1, 1601 (UTC).
+    /// The returned 64-bit value is equivalent to a [`FILETIME`] struct,
+    /// which represents the number of 100-nanosecond intervals since
+    /// January 1, 1601 (UTC). The struct is automatically
+    /// converted to a `u64` value, as that is the recommended way
+    /// to use it.
+    ///
+    /// If the underlying filesystem does not support creation time, the
+    /// returned value is 0.
+    ///
+    /// # Examples
+    ///
+    /// ```no_run
+    /// use std::io;
+    /// use std::fs;
+    /// use std::os::windows::prelude::*;
+    ///
+    /// # fn foo() -> io::Result<()> {
+    /// let metadata = fs::metadata("foo.txt")?;
+    /// let creation_time = metadata.creation_time();
+    /// # Ok(())
+    /// # }
+    /// ```
+    ///
+    /// [`FILETIME`]: https://msdn.microsoft.com/en-us/library/windows/desktop/ms724284.aspx
     #[stable(feature = "metadata_ext", since = "1.1.0")]
     fn creation_time(&self) -> u64;
 
     /// Returns the value of the `ftLastAccessTime` field of this metadata.
     ///
-    /// The returned 64-bit value represents the number of 100-nanosecond
-    /// intervals since January 1, 1601 (UTC).
+    /// The returned 64-bit value is equivalent to a [`FILETIME`] struct,
+    /// which represents the number of 100-nanosecond intervals since
+    /// January 1, 1601 (UTC). The struct is automatically
+    /// converted to a `u64` value, as that is the recommended way
+    /// to use it.
+    ///
+    /// For a file, the value specifies the last time that a file was read
+    /// from or written to. For a directory, the value specifies when
+    /// the directory was created. For both files and directories, the
+    /// specified date is correct, but the time of day is always set to
+    /// midnight.
+    ///
+    /// If the underlying filesystem does not support last access time, the
+    /// returned value is 0.
+    ///
+    /// # Examples
+    ///
+    /// ```no_run
+    /// use std::io;
+    /// use std::fs;
+    /// use std::os::windows::prelude::*;
+    ///
+    /// # fn foo() -> io::Result<()> {
+    /// let metadata = fs::metadata("foo.txt")?;
+    /// let last_access_time = metadata.last_access_time();
+    /// # Ok(())
+    /// # }
+    /// ```
+    ///
+    /// [`FILETIME`]: https://msdn.microsoft.com/en-us/library/windows/desktop/ms724284.aspx
     #[stable(feature = "metadata_ext", since = "1.1.0")]
     fn last_access_time(&self) -> u64;
 
     /// Returns the value of the `ftLastWriteTime` field of this metadata.
     ///
-    /// The returned 64-bit value represents the number of 100-nanosecond
-    /// intervals since January 1, 1601 (UTC).
+    /// The returned 64-bit value is equivalent to a [`FILETIME`] struct,
+    /// which represents the number of 100-nanosecond intervals since
+    /// January 1, 1601 (UTC). The struct is automatically
+    /// converted to a `u64` value, as that is the recommended way
+    /// to use it.
+    ///
+    /// For a file, the value specifies the last time that a file was written
+    /// to. For a directory, the structure specifies when the directory was
+    /// created.
+    ///
+    /// If the underlying filesystem does not support the last write time,
+    /// the returned value is 0.
+    ///
+    /// # Examples
+    ///
+    /// ```no_run
+    /// use std::io;
+    /// use std::fs;
+    /// use std::os::windows::prelude::*;
+    ///
+    /// # fn foo() -> io::Result<()> {
+    /// let metadata = fs::metadata("foo.txt")?;
+    /// let last_write_time = metadata.last_write_time();
+    /// # Ok(())
+    /// # }
+    /// ```
+    ///
+    /// [`FILETIME`]: https://msdn.microsoft.com/en-us/library/windows/desktop/ms724284.aspx
     #[stable(feature = "metadata_ext", since = "1.1.0")]
     fn last_write_time(&self) -> u64;
 
@@ -233,6 +414,20 @@ pub trait MetadataExt {
     /// metadata.
     ///
     /// The returned value does not have meaning for directories.
+    ///
+    /// # Examples
+    ///
+    /// ```no_run
+    /// use std::io;
+    /// use std::fs;
+    /// use std::os::windows::prelude::*;
+    ///
+    /// # fn foo() -> io::Result<()> {
+    /// let metadata = fs::metadata("foo.txt")?;
+    /// let file_size = metadata.file_size();
+    /// # Ok(())
+    /// # }
+    /// ```
     #[stable(feature = "metadata_ext", since = "1.1.0")]
     fn file_size(&self) -> u64;
 }
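Since the three timestamp accessors above all return raw `FILETIME`-style values, a short worked conversion may help: dividing by 10,000,000 turns 100-nanosecond intervals into seconds, and subtracting 11,644,473,600 (the number of seconds between 1601-01-01 and 1970-01-01) yields a Unix timestamp. The sketch below is illustrative only, assumes a Windows target, and the helper name is hypothetical.

```rust
use std::fs;
use std::io;
use std::os::windows::prelude::*;

// Hypothetical helper: convert a FILETIME-style `u64` (100-nanosecond
// intervals since 1601-01-01 UTC) into whole seconds since the Unix epoch.
fn filetime_to_unix_secs(ft: u64) -> i64 {
    (ft / 10_000_000) as i64 - 11_644_473_600
}

fn show_creation_time() -> io::Result<()> {
    let metadata = fs::metadata("foo.txt")?;
    let secs = filetime_to_unix_secs(metadata.creation_time());
    println!("foo.txt was created {} seconds after the Unix epoch", secs);
    Ok(())
}

fn main() {
    if let Err(e) = show_creation_time() {
        println!("error: {}", e);
    }
}
```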
@@ -253,7 +448,7 @@ fn file_size(&self) -> u64 { self.as_inner().size() }
 ///
 /// # Examples
 ///
-/// ```ignore
+/// ```no_run
 /// use std::os::windows::fs;
 ///
 /// # fn foo() -> std::io::Result<()> {
@@ -274,7 +469,7 @@ pub fn symlink_file<P: AsRef<Path>, Q: AsRef<Path>>(src: P, dst: Q)
 ///
 /// # Examples
 ///
-/// ```ignore
+/// ```no_run
 /// use std::os::windows::fs;
 ///
 /// # fn foo() -> std::io::Result<()> {
index f12e50cc92317d80496a67ecb3bd03fddc2f8a11..11b1337a8aec0b933965d39c9cb1b070ad0a44a5 100644 (file)
@@ -8,11 +8,13 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-//! Experimental extensions to `std` for Windows.
+//! Platform-specific extensions to `std` for Windows.
 //!
-//! For now, this module is limited to extracting handles, file
-//! descriptors, and sockets, but its functionality will grow over
-//! time.
+//! Provides access to platform-level information for Windows, and exposes
+//! Windows-specific idioms that would otherwise be inappropriate as part of
+//! the core `std` library. These extensions allow developers to use `std`
+//! types and idioms with Windows in ways that the platform-agnostic idioms
+//! would not otherwise support.
 
 #![stable(feature = "rust1", since = "1.0.0")]
 
index 79aaf34ce2e0f5d68d18ea134f7cefa4275b20f2..df5e4ef1d886e9f589e413b2ba46d54beb4be1be 100644 (file)
@@ -750,6 +750,7 @@ fn size_hint(&self) -> (usize, Option<usize>) {
     }
 }
 
+/// Generates a wide character sequence for potentially ill-formed UTF-16.
 #[stable(feature = "rust1", since = "1.0.0")]
 #[derive(Clone)]
 pub struct EncodeWide<'a> {
index e2b22b1d89f045496e267eed902d2b8e3adf2fe4..c2c6e6cf87dff4ad71fcde7e8f721f342ef3d37c 100644 (file)
 /// A thread local storage key which owns its contents.
 ///
 /// This key uses the fastest possible implementation available to it for the
-/// target platform. It is instantiated with the `thread_local!` macro and the
-/// primary method is the `with` method.
+/// target platform. It is instantiated with the [`thread_local!`] macro and the
+/// primary method is the [`with`] method.
 ///
-/// The `with` method yields a reference to the contained value which cannot be
+/// The [`with`] method yields a reference to the contained value which cannot be
 /// sent across threads or escape the given closure.
 ///
 /// # Initialization and Destruction
 ///
-/// Initialization is dynamically performed on the first call to `with()`
-/// within a thread, and values that implement `Drop` get destructed when a
+/// Initialization is dynamically performed on the first call to [`with`]
+/// within a thread, and values that implement [`Drop`] get destructed when a
 /// thread exits. Some caveats apply, which are explained below.
 ///
 /// # Examples
 /// 3. On macOS, initializing TLS during destruction of other TLS slots can
 ///    sometimes cancel *all* destructors for the current thread, whether or
 ///    not the slots have already had their destructors run.
+///
+/// [`with`]: ../../std/thread/struct.LocalKey.html#method.with
+/// [`thread_local!`]: ../../std/macro.thread_local.html
+/// [`Drop`]: ../../std/ops/trait.Drop.html
 #[stable(feature = "rust1", since = "1.0.0")]
 pub struct LocalKey<T: 'static> {
     // This outer `LocalKey<T>` type is what's going to be stored in statics,
@@ -106,7 +110,7 @@ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
     }
 }
 
-/// Declare a new thread local storage key of type `std::thread::LocalKey`.
+/// Declare a new thread local storage key of type [`std::thread::LocalKey`].
 ///
 /// # Syntax
 ///
@@ -124,8 +128,10 @@ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
 /// # fn main() {}
 /// ```
 ///
-/// See [LocalKey documentation](thread/struct.LocalKey.html) for more
+/// See [LocalKey documentation][`std::thread::LocalKey`] for more
 /// information.
+///
+/// [`std::thread::LocalKey`]: ../std/thread/struct.LocalKey.html
 #[macro_export]
 #[stable(feature = "rust1", since = "1.0.0")]
 #[allow_internal_unstable]
@@ -195,11 +201,13 @@ fn __getit() -> $crate::option::Option<
 #[derive(Debug, Eq, PartialEq, Copy, Clone)]
 pub enum LocalKeyState {
     /// All keys are in this state whenever a thread starts. Keys will
-    /// transition to the `Valid` state once the first call to `with` happens
+    /// transition to the `Valid` state once the first call to [`with`] happens
     /// and the initialization expression succeeds.
     ///
     /// Keys in the `Uninitialized` state will yield a reference to the closure
-    /// passed to `with` so long as the initialization routine does not panic.
+    /// passed to [`with`] so long as the initialization routine does not panic.
+    ///
+    /// [`with`]: ../../std/thread/struct.LocalKey.html#method.with
     Uninitialized,
 
     /// Once a key has been accessed successfully, it will enter the `Valid`
@@ -208,7 +216,9 @@ pub enum LocalKeyState {
     /// `Destroyed` state.
     ///
     /// Keys in the `Valid` state will be guaranteed to yield a reference to the
-    /// closure passed to `with`.
+    /// closure passed to [`with`].
+    ///
+    /// [`with`]: ../../std/thread/struct.LocalKey.html#method.with
     Valid,
 
     /// When a thread exits, the destructors for keys will be run (if
@@ -216,7 +226,9 @@ pub enum LocalKeyState {
     /// destructor has run, a key is in the `Destroyed` state.
     ///
     /// Keys in the `Destroyed` states will trigger a panic when accessed via
-    /// `with`.
+    /// [`with`].
+    ///
+    /// [`with`]: ../../std/thread/struct.LocalKey.html#method.with
     Destroyed,
 }
 
@@ -283,23 +295,26 @@ unsafe fn init(&self, slot: &UnsafeCell<Option<T>>) -> &T {
     /// Query the current state of this key.
     ///
     /// A key is initially in the `Uninitialized` state whenever a thread
-    /// starts. It will remain in this state up until the first call to `with`
+    /// starts. It will remain in this state up until the first call to [`with`]
     /// within a thread has run the initialization expression successfully.
     ///
     /// Once the initialization expression succeeds, the key transitions to the
-    /// `Valid` state which will guarantee that future calls to `with` will
+    /// `Valid` state which will guarantee that future calls to [`with`] will
     /// succeed within the thread.
     ///
     /// When a thread exits, each key will be destroyed in turn, and as keys are
     /// destroyed they will enter the `Destroyed` state just before the
     /// destructor starts to run. Keys may remain in the `Destroyed` state after
     /// destruction has completed. Keys without destructors (e.g. with types
-    /// that are `Copy`), may never enter the `Destroyed` state.
+    /// that are [`Copy`]) may never enter the `Destroyed` state.
     ///
     /// Keys in the `Uninitialized` state can be accessed so long as the
     /// initialization does not panic. Keys in the `Valid` state are guaranteed
     /// to be able to be accessed. Keys in the `Destroyed` state will panic on
-    /// any call to `with`.
+    /// any call to [`with`].
+    ///
+    /// [`with`]: ../../std/thread/struct.LocalKey.html#method.with
+    /// [`Copy`]: ../../std/marker/trait.Copy.html
     #[unstable(feature = "thread_local_state",
                reason = "state querying was recently added",
                issue = "27716")]
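To make the `LocalKey`/`thread_local!` discussion above concrete, here is a minimal sketch using only stable APIs: the key is initialized lazily on the first call to `with` in each thread, and every thread gets its own copy.

```rust
use std::cell::RefCell;
use std::thread;

thread_local!(static COUNTER: RefCell<u32> = RefCell::new(0));

fn bump() -> u32 {
    // The first call in a thread runs the initializer; later calls reuse it.
    COUNTER.with(|c| {
        *c.borrow_mut() += 1;
        *c.borrow()
    })
}

fn main() {
    assert_eq!(bump(), 1);
    assert_eq!(bump(), 2);

    // A spawned thread sees its own, freshly initialized copy.
    let handle = thread::spawn(|| bump());
    assert_eq!(handle.join().unwrap(), 1);
}
```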
index 200368be275c335e81b03e6b986fb6a8ded6c800..154406a1d8bd78ca29965e9f54f49f4af51fc896 100644 (file)
 // Builder
 ////////////////////////////////////////////////////////////////////////////////
 
-/// Thread configuration. Provides detailed control over the properties
-/// and behavior of new threads.
+/// Thread factory, which can be used in order to configure the properties of
+/// a new thread.
+///
+/// Methods can be chained on it in order to configure it.
+///
+/// The two configurations available are:
+///
+/// - [`name`]: allows giving a name to the thread; the name is currently
+///   only used in `panic` messages.
+/// - [`stack_size`]: specifies the desired stack size. Note that this can
+///   be overridden by the OS.
+///
+/// If the [`stack_size`] field is not specified, the stack size
+/// will be taken from the `RUST_MIN_STACK` environment variable. If that
+/// is not set either, a sensible default is used.
+///
+/// If the [`name`] field is not specified, the thread will not be named.
+///
+/// The [`spawn`] method will take ownership of the builder and return an
+/// [`io::Result`] containing the thread handle created with the given
+/// configuration.
+///
+/// The [`thread::spawn`] free function uses a `Builder` with default
+/// configuration and [`unwrap`]s its return value.
+///
+/// You may want to use [`spawn`] instead of [`thread::spawn`] when you want
+/// to recover from a failure to launch a thread: the free function will
+/// panic where the `Builder` method will return an [`io::Result`].
 ///
 /// # Examples
 ///
 ///
 /// handler.join().unwrap();
 /// ```
+///
+/// [`thread::spawn`]: ../../std/thread/fn.spawn.html
+/// [`stack_size`]: ../../std/thread/struct.Builder.html#method.stack_size
+/// [`name`]: ../../std/thread/struct.Builder.html#method.name
+/// [`spawn`]: ../../std/thread/struct.Builder.html#method.spawn
+/// [`io::Result`]: ../../std/io/type.Result.html
+/// [`unwrap`]: ../../std/result/enum.Result.html#method.unwrap
 #[stable(feature = "rust1", since = "1.0.0")]
 #[derive(Debug)]
 pub struct Builder {
@@ -209,11 +241,6 @@ impl Builder {
     /// Generates the base configuration for spawning a thread, from which
     /// configuration methods can be chained.
     ///
-    /// If the [`stack_size`] field is not specified, the stack size
-    /// will be the `RUST_MIN_STACK` environment variable.  If it is
-    /// not specified either, a sensible default will be set (2MB as
-    /// of the writting of this doc).
-    ///
     /// # Examples
     ///
     /// ```
@@ -229,8 +256,6 @@ impl Builder {
     ///
     /// handler.join().unwrap();
     /// ```
-    ///
-    /// [`stack_size`]: ../../std/thread/struct.Builder.html#method.stack_size
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn new() -> Builder {
         Builder {
@@ -280,9 +305,10 @@ pub fn stack_size(mut self, size: usize) -> Builder {
         self
     }
 
-    /// Spawns a new thread, and returns a join handle for it.
+    /// Spawns a new thread by taking ownership of the `Builder`, and returns an
+    /// [`io::Result`] to its [`JoinHandle`].
     ///
-    /// The child thread may outlive the parent (unless the parent thread
+    /// The spawned thread may outlive the caller (unless the caller thread
     /// is the main thread; the whole process is terminated when the main
     /// thread finishes). The join handle can be used to block on
     /// termination of the child thread, including recovering its panics.
@@ -297,6 +323,7 @@ pub fn stack_size(mut self, size: usize) -> Builder {
     ///
     /// [`spawn`]: ../../std/thread/fn.spawn.html
     /// [`io::Result`]: ../../std/io/type.Result.html
+    /// [`JoinHandle`]: ../../std/thread/struct.JoinHandle.html
     ///
     /// # Examples
     ///
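The `spawn` examples themselves are elided from this hunk; as a minimal sketch of the behaviour described above (stable APIs only, names illustrative), the `Builder` path surfaces a launch failure as an `io::Result` instead of panicking like `thread::spawn`:

```rust
use std::thread;

fn main() {
    let builder = thread::Builder::new()
        .name("worker".into())
        .stack_size(32 * 1024);

    // Unlike `thread::spawn`, a failure to launch the thread is returned
    // as an `Err`, so the caller can recover instead of panicking.
    let handle = builder
        .spawn(|| {
            println!("hello from {:?}", thread::current().name());
        })
        .expect("failed to spawn thread");

    handle.join().unwrap();
}
```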
@@ -468,6 +495,23 @@ pub fn current() -> Thread {
 
 /// Cooperatively gives up a timeslice to the OS scheduler.
 ///
+/// This is used when the programmer knows that the thread will have nothing
+/// to do for some time, so that computing time is not wasted.
+///
+/// For example, when polling on a resource, it is common to check whether it
+/// is available and, if not, to yield in order to avoid busy waiting.
+///
+/// Thus the pattern of `yield`ing after a failed poll is rather common when
+/// implementing low-level shared resources or synchronization primitives.
+///
+/// However, programmers will usually prefer to use [`channel`]s, [`Condvar`]s,
+/// [`Mutex`]es or [`join`] for their synchronization routines, as they avoid
+/// thinking about thread scheduling.
+///
+/// Note that [`channel`]s, for example, are implemented using this primitive.
+/// When you call `send` or `recv`, which block, they will yield
+/// if the channel is not available.
+///
 /// # Examples
 ///
 /// ```
@@ -475,6 +519,12 @@ pub fn current() -> Thread {
 ///
 /// thread::yield_now();
 /// ```
+///
+/// [`channel`]: ../../std/sync/mpsc/index.html
+/// [`spawn`]: ../../std/thread/fn.spawn.html
+/// [`join`]: ../../std/thread/struct.JoinHandle.html#method.join
+/// [`Mutex`]: ../../std/sync/struct.Mutex.html
+/// [`Condvar`]: ../../std/sync/struct.Condvar.html
 #[stable(feature = "rust1", since = "1.0.0")]
 pub fn yield_now() {
     imp::Thread::yield_now()
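As a sketch of the polling pattern that the new `yield_now` paragraphs describe (stable APIs only, purely illustrative), a thread can yield its timeslice while waiting for a flag set by another thread instead of spinning hot:

```rust
use std::sync::Arc;
use std::sync::atomic::{AtomicBool, Ordering};
use std::thread;

fn main() {
    let ready = Arc::new(AtomicBool::new(false));
    let flag = ready.clone();

    let worker = thread::spawn(move || {
        // ... perform some work, then signal completion ...
        flag.store(true, Ordering::Release);
    });

    // Poll the flag, giving up the timeslice on each failed check
    // instead of busy-waiting.
    while !ready.load(Ordering::Acquire) {
        thread::yield_now();
    }

    worker.join().unwrap();
}
```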
index e5bb02fe08230253ddfa15006aa864a9879c0a9d..24ce99208ed11bc9e89cda6408003e12fa3207d2 100644 (file)
@@ -715,7 +715,7 @@ pub fn add_trailing_semicolon(mut self) -> Self {
             StmtKind::Mac(mac) => StmtKind::Mac(mac.map(|(mac, _style, attrs)| {
                 (mac, MacStmtStyle::Semicolon, attrs)
             })),
-            node @ _ => node,
+            node => node,
         };
         self
     }
@@ -1076,16 +1076,16 @@ pub fn is_str(&self) -> bool {
     pub fn is_unsuffixed(&self) -> bool {
         match *self {
             // unsuffixed variants
-            LitKind::Str(..) => true,
-            LitKind::ByteStr(..) => true,
-            LitKind::Byte(..) => true,
-            LitKind::Char(..) => true,
-            LitKind::Int(_, LitIntType::Unsuffixed) => true,
-            LitKind::FloatUnsuffixed(..) => true,
+            LitKind::Str(..) |
+            LitKind::ByteStr(..) |
+            LitKind::Byte(..) |
+            LitKind::Char(..) |
+            LitKind::Int(_, LitIntType::Unsuffixed) |
+            LitKind::FloatUnsuffixed(..) |
             LitKind::Bool(..) => true,
             // suffixed variants
-            LitKind::Int(_, LitIntType::Signed(..)) => false,
-            LitKind::Int(_, LitIntType::Unsigned(..)) => false,
+            LitKind::Int(_, LitIntType::Signed(..)) |
+            LitKind::Int(_, LitIntType::Unsigned(..)) |
             LitKind::Float(..) => false,
         }
     }
index 0980b73e80c5c5d8125973de01fb8586db045b2d..45f891d8dc56db911f325fd6ff90b4034066e834 100644 (file)
@@ -112,7 +112,7 @@ impl NestedMetaItem {
     /// Returns the MetaItem if self is a NestedMetaItemKind::MetaItem.
     pub fn meta_item(&self) -> Option<&MetaItem> {
         match self.node {
-            NestedMetaItemKind::MetaItem(ref item) => Some(&item),
+            NestedMetaItemKind::MetaItem(ref item) => Some(item),
             _ => None
         }
     }
@@ -120,7 +120,7 @@ pub fn meta_item(&self) -> Option<&MetaItem> {
     /// Returns the Lit if self is a NestedMetaItemKind::Literal.
     pub fn literal(&self) -> Option<&Lit> {
         match self.node {
-            NestedMetaItemKind::Literal(ref lit) => Some(&lit),
+            NestedMetaItemKind::Literal(ref lit) => Some(lit),
             _ => None
         }
     }
@@ -259,7 +259,7 @@ pub fn value_str(&self) -> Option<Symbol> {
         match self.node {
             MetaItemKind::NameValue(ref v) => {
                 match v.node {
-                    LitKind::Str(ref s, _) => Some((*s).clone()),
+                    LitKind::Str(ref s, _) => Some(*s),
                     _ => None,
                 }
             },
@@ -1217,9 +1217,10 @@ fn token(&self) -> Token {
                 Token::Literal(token::Lit::Float(symbol), Some(Symbol::intern(ty.ty_to_string())))
             }
             LitKind::FloatUnsuffixed(symbol) => Token::Literal(token::Lit::Float(symbol), None),
-            LitKind::Bool(value) => Token::Ident(Ident::with_empty_ctxt(Symbol::intern(match value {
-                true => "true",
-                false => "false",
+            LitKind::Bool(value) => Token::Ident(Ident::with_empty_ctxt(Symbol::intern(if value {
+                "true"
+            } else {
+                "false"
             }))),
         }
     }
@@ -1261,7 +1262,7 @@ fn map_attrs<F: FnOnce(Vec<ast::Attribute>) -> Vec<ast::Attribute>>(self, f: F)
 
 impl HasAttrs for Vec<Attribute> {
     fn attrs(&self) -> &[Attribute] {
-        &self
+        self
     }
     fn map_attrs<F: FnOnce(Vec<Attribute>) -> Vec<Attribute>>(self, f: F) -> Self {
         f(self)
@@ -1270,7 +1271,7 @@ fn map_attrs<F: FnOnce(Vec<Attribute>) -> Vec<Attribute>>(self, f: F) -> Self {
 
 impl HasAttrs for ThinVec<Attribute> {
     fn attrs(&self) -> &[Attribute] {
-        &self
+        self
     }
     fn map_attrs<F: FnOnce(Vec<Attribute>) -> Vec<Attribute>>(self, f: F) -> Self {
         f(self.into()).into()
index 0c8be1d4f24591822ab0686d3851a912b5c8b457..d32c3ec5f46b1f063bbcfa26ebea609d7b158178 100644 (file)
@@ -485,7 +485,7 @@ pub fn span_until_char(&self, sp: Span, c: char) -> Span {
         match self.span_to_snippet(sp) {
             Ok(snippet) => {
                 let snippet = snippet.split(c).nth(0).unwrap_or("").trim_right();
-                if snippet.len() > 0 && !snippet.contains('\n') {
+                if !snippet.is_empty() && !snippet.contains('\n') {
                     Span { hi: BytePos(sp.lo.0 + snippet.len() as u32), ..sp }
                 } else {
                     sp
@@ -502,7 +502,7 @@ pub fn def_span(&self, sp: Span) -> Span {
     pub fn get_filemap(&self, filename: &str) -> Option<Rc<FileMap>> {
         for fm in self.files.borrow().iter() {
             if filename == fm.name {
-               (self.dep_tracking_callback.borrow())(&fm);
+               (self.dep_tracking_callback.borrow())(fm);
                 return Some(fm.clone());
             }
         }
index ede8a33df6546db821b3681f93758aa362e43a29..2e98c7d962606cf524f7cbd718f008f507591b4e 100644 (file)
@@ -123,7 +123,7 @@ pub fn in_cfg(&mut self, attrs: &[ast::Attribute]) -> bool {
                 return false;
             }
 
-            let mis = if !is_cfg(&attr) {
+            let mis = if !is_cfg(attr) {
                 return true;
             } else if let Some(mis) = attr.meta_item_list() {
                 mis
@@ -150,7 +150,7 @@ fn visit_expr_attrs(&mut self, attrs: &[ast::Attribute]) {
         // flag the offending attributes
         for attr in attrs.iter() {
             if !self.features.map(|features| features.stmt_expr_attributes).unwrap_or(true) {
-                let mut err = feature_err(&self.sess,
+                let mut err = feature_err(self.sess,
                                           "stmt_expr_attributes",
                                           attr.span,
                                           GateIssue::Language,
@@ -258,7 +258,7 @@ pub fn configure_stmt(&mut self, stmt: ast::Stmt) -> Option<ast::Stmt> {
     pub fn configure_struct_expr_field(&mut self, field: ast::Field) -> Option<ast::Field> {
         if !self.features.map(|features| features.struct_field_attributes).unwrap_or(true) {
             if !field.attrs.is_empty() {
-                let mut err = feature_err(&self.sess,
+                let mut err = feature_err(self.sess,
                                           "struct_field_attributes",
                                           field.span,
                                           GateIssue::Language,
@@ -290,7 +290,7 @@ fn visit_struct_field_attrs(&mut self, attrs: &[ast::Attribute]) {
         for attr in attrs.iter() {
             if !self.features.map(|features| features.struct_field_attributes).unwrap_or(true) {
                 let mut err = feature_err(
-                    &self.sess,
+                    self.sess,
                     "struct_field_attributes",
                     attr.span,
                     GateIssue::Language,
index fe5cb87ad59b5c89370490754c45576b6bcad900..73aeb40df840064ae0ffd2ea79ee369a9939d498 100644 (file)
@@ -120,7 +120,7 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt,
 
         // URLs can be unavoidably longer than the line limit, so we allow them.
         // Allowed format is: `[name]: https://www.rust-lang.org/`
-        let is_url = |l: &str| l.starts_with('[') && l.contains("]:") && l.contains("http");
+        let is_url = |l: &str| l.starts_with("[") && l.contains("]:") && l.contains("http");
 
         if msg.lines().any(|line| line.len() > MAX_DESCRIPTION_WIDTH && !is_url(line)) {
             ecx.span_err(span, &format!(
@@ -177,7 +177,7 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt,
             if let Err(e) = output_metadata(ecx,
                                             &target_triple,
                                             &crate_name.name.as_str(),
-                                            &diagnostics) {
+                                            diagnostics) {
                 ecx.span_bug(span, &format!(
                     "error writing metadata for triple `{}` and crate `{}`, error: {}, \
                      cause: {:?}",
@@ -227,7 +227,7 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt,
 
     MacEager::items(SmallVector::many(vec![
         P(ast::Item {
-            ident: name.clone(),
+            ident: *name,
             attrs: Vec::new(),
             id: ast::DUMMY_NODE_ID,
             node: ast::ItemKind::Const(
index f731c5abdd6a1f7e0ad275f45460e047e1f5779f..1930f61121bb028113f2dc37d5b037f6ca9f2a47 100644 (file)
@@ -535,7 +535,7 @@ pub enum SyntaxExtension {
     ///
     /// The `bool` dictates whether the contents of the macro can
     /// directly use `#[unstable]` things (true == yes).
-    NormalTT(Box<TTMacroExpander>, Option<Span>, bool),
+    NormalTT(Box<TTMacroExpander>, Option<(ast::NodeId, Span)>, bool),
 
     /// A function-like syntax extension that has an extra ident before
     /// the block.
@@ -589,6 +589,7 @@ fn resolve_invoc(&mut self, invoc: &mut Invocation, scope: Mark, force: bool)
                      -> Result<Option<Rc<SyntaxExtension>>, Determinacy>;
     fn resolve_macro(&mut self, scope: Mark, path: &ast::Path, kind: MacroKind, force: bool)
                      -> Result<Rc<SyntaxExtension>, Determinacy>;
+    fn check_unused_macros(&self);
 }
 
 #[derive(Copy, Clone, Debug)]
@@ -618,6 +619,7 @@ fn resolve_macro(&mut self, _scope: Mark, _path: &ast::Path, _kind: MacroKind,
                      _force: bool) -> Result<Rc<SyntaxExtension>, Determinacy> {
         Err(Determinacy::Determined)
     }
+    fn check_unused_macros(&self) {}
 }
 
 #[derive(Clone)]
@@ -635,8 +637,8 @@ pub struct ExpansionData {
 }
 
 /// One of these is made during expansion and incrementally updated as we go;
-/// when a macro expansion occurs, the resulting nodes have the backtrace()
-/// -> expn_info of their expansion context stored into their span.
+/// when a macro expansion occurs, the resulting nodes have the `backtrace()
+/// -> expn_info` of their expansion context stored into their span.
 pub struct ExtCtxt<'a> {
     pub parse_sess: &'a parse::ParseSess,
     pub ecfg: expand::ExpansionConfig<'a>,
@@ -698,7 +700,7 @@ pub fn backtrace(&self) -> SyntaxContext {
     /// Returns span for the macro which originally caused the current expansion to happen.
     ///
     /// Stops backtracing at include! boundary.
-    pub fn expansion_cause(&self) -> Span {
+    pub fn expansion_cause(&self) -> Option<Span> {
         let mut ctxt = self.backtrace();
         let mut last_macro = None;
         loop {
@@ -709,12 +711,12 @@ pub fn expansion_cause(&self) -> Span {
                 }
                 ctxt = info.call_site.ctxt;
                 last_macro = Some(info.call_site);
-                return Some(());
+                Some(())
             }).is_none() {
                 break
             }
         }
-        last_macro.expect("missing expansion backtrace")
+        last_macro
     }
 
     pub fn struct_span_warn(&self,
@@ -770,9 +772,9 @@ pub fn span_bug(&self, sp: Span, msg: &str) -> ! {
     }
     pub fn trace_macros_diag(&self) {
         for (sp, notes) in self.expansions.iter() {
-            let mut db = self.parse_sess.span_diagnostic.span_note_diag(*sp, &"trace_macro");
+            let mut db = self.parse_sess.span_diagnostic.span_note_diag(*sp, "trace_macro");
             for note in notes {
-                db.note(&note);
+                db.note(note);
             }
             db.emit();
         }
@@ -795,11 +797,15 @@ pub fn std_path(&self, components: &[&str]) -> Vec<ast::Ident> {
             v.push(self.ident_of(s));
         }
         v.extend(components.iter().map(|s| self.ident_of(s)));
-        return v
+        v
     }
     pub fn name_of(&self, st: &str) -> ast::Name {
         Symbol::intern(st)
     }
+
+    pub fn check_unused_macros(&self) {
+        self.resolver.check_unused_macros();
+    }
 }
 
 /// Extract a string literal from the macro expanded version of `expr`,
index a8aa103f80a8e4a7c0f150d59ebfd2a427cd7517..25e0aed220ab3fb036e5fb37b890b95bb448d671 100644 (file)
@@ -415,19 +415,19 @@ fn expand_attr_invoc(&mut self, invoc: Invocation, ext: Rc<SyntaxExtension>) ->
 
         match *ext {
             MultiModifier(ref mac) => {
-                let meta = panictry!(attr.parse_meta(&self.cx.parse_sess));
+                let meta = panictry!(attr.parse_meta(self.cx.parse_sess));
                 let item = mac.expand(self.cx, attr.span, &meta, item);
                 kind.expect_from_annotatables(item)
             }
             MultiDecorator(ref mac) => {
                 let mut items = Vec::new();
-                let meta = panictry!(attr.parse_meta(&self.cx.parse_sess));
+                let meta = panictry!(attr.parse_meta(self.cx.parse_sess));
                 mac.expand(self.cx, attr.span, &meta, &item, &mut |item| items.push(item));
                 items.push(item);
                 kind.expect_from_annotatables(items)
             }
             SyntaxExtension::AttrProcMacro(ref mac) => {
-                let item_toks = stream_for_item(&item, &self.cx.parse_sess);
+                let item_toks = stream_for_item(&item, self.cx.parse_sess);
 
                 let span = Span { ctxt: self.cx.backtrace(), ..attr.span };
                 let tok_result = mac.expand(self.cx, attr.span, attr.tokens, item_toks);
@@ -439,7 +439,7 @@ fn expand_attr_invoc(&mut self, invoc: Invocation, ext: Rc<SyntaxExtension>) ->
             }
             _ => {
                 let msg = &format!("macro `{}` may not be used in attributes", attr.path);
-                self.cx.span_err(attr.span, &msg);
+                self.cx.span_err(attr.span, msg);
                 kind.dummy(attr.span)
             }
         }
@@ -454,7 +454,7 @@ fn expand_bang_invoc(&mut self, invoc: Invocation, ext: Rc<SyntaxExtension>) ->
         };
         let path = &mac.node.path;
 
-        let ident = ident.unwrap_or(keywords::Invalid.ident());
+        let ident = ident.unwrap_or_else(|| keywords::Invalid.ident());
         let marked_tts = noop_fold_tts(mac.node.stream(), &mut Marker(mark));
         let opt_expanded = match *ext {
             NormalTT(ref expandfun, exp_span, allow_internal_unstable) => {
@@ -469,7 +469,7 @@ fn expand_bang_invoc(&mut self, invoc: Invocation, ext: Rc<SyntaxExtension>) ->
                     call_site: span,
                     callee: NameAndSpan {
                         format: MacroBang(Symbol::intern(&format!("{}", path))),
-                        span: exp_span,
+                        span: exp_span.map(|(_, s)| s),
                         allow_internal_unstable: allow_internal_unstable,
                     },
                 });
@@ -591,7 +591,7 @@ fn expand_derive_invoc(&mut self, invoc: Invocation, ext: Rc<SyntaxExtension>) -
             }
             _ => {
                 let msg = &format!("macro `{}` may not be used for derive attributes", attr.path);
-                self.cx.span_err(span, &msg);
+                self.cx.span_err(span, msg);
                 kind.dummy(span)
             }
         }
@@ -749,19 +749,15 @@ fn configure<T: HasAttrs>(&mut self, node: T) -> Option<T> {
     fn check_attributes(&mut self, attrs: &[ast::Attribute]) {
         let features = self.cx.ecfg.features.unwrap();
         for attr in attrs.iter() {
-            feature_gate::check_attribute(&attr, &self.cx.parse_sess, features);
+            feature_gate::check_attribute(attr, self.cx.parse_sess, features);
         }
     }
 }
 
 pub fn find_attr_invoc(attrs: &mut Vec<ast::Attribute>) -> Option<ast::Attribute> {
-    for i in 0 .. attrs.len() {
-        if !attr::is_known(&attrs[i]) && !is_builtin_attr(&attrs[i]) {
-             return Some(attrs.remove(i));
-        }
-    }
-
-    None
+    attrs.iter()
+         .position(|a| !attr::is_known(a) && !is_builtin_attr(a))
+         .map(|i| attrs.remove(i))
 }
 
 // These are pretty nasty. Ideally, we would keep the tokens around, linked from
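The `find_attr_invoc` rewrite above replaces an index loop with `Iterator::position` followed by `Option::map`; a standalone sketch of the same pattern over a plain `Vec<i32>` (the predicate here is illustrative, not the real `attr::is_known` check):

```rust
// Remove and return the first element matching a predicate, if any.
fn remove_first_odd(v: &mut Vec<i32>) -> Option<i32> {
    v.iter()
     .position(|x| x % 2 != 0) // index of the first match, or None
     .map(|i| v.remove(i))     // remove() only runs when a match was found
}

fn main() {
    let mut v = vec![2, 4, 5, 6, 7];
    assert_eq!(remove_first_odd(&mut v), Some(5));
    assert_eq!(v, vec![2, 4, 6, 7]);
}
```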
@@ -923,7 +919,7 @@ fn fold_item(&mut self, item: P<ast::Item>) -> SmallVector<P<ast::Item>> {
                 let result = noop_fold_item(item, self);
                 self.cx.current_expansion.module = orig_module;
                 self.cx.current_expansion.directory_ownership = orig_directory_ownership;
-                return result;
+                result
             }
             // Ensure that test functions are accessible from the test harness.
             ast::ItemKind::Fn(..) if self.cx.ecfg.should_test => {
index d7a85baa3fff56b127eefdea5fcd24be5aa7a677..f8fac847a053ef63c68e00b8ca7bcb7d4c4f2ebd 100644 (file)
@@ -23,7 +23,7 @@
 ///
 /// This is registered as a set of expression syntax extension called quote!
 /// that lifts its argument token-tree to an AST representing the
-/// construction of the same token tree, with token::SubstNt interpreted
+/// construction of the same token tree, with `token::SubstNt` interpreted
 /// as antiquotes (splices).
 
 pub mod rt {
@@ -389,7 +389,7 @@ pub fn unflatten(tts: Vec<TokenTree>) -> Vec<TokenTree> {
                 result = results.pop().unwrap();
                 result.push(tree);
             }
-            tree @ _ => result.push(tree),
+            tree => result.push(tree),
         }
     }
     result
@@ -612,8 +612,11 @@ fn mk_delim(cx: &ExtCtxt, sp: Span, delim: token::DelimToken) -> P<ast::Expr> {
 #[allow(non_upper_case_globals)]
 fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
     macro_rules! mk_lit {
-        ($name: expr, $suffix: expr, $($args: expr),*) => {{
-            let inner = cx.expr_call(sp, mk_token_path(cx, sp, $name), vec![$($args),*]);
+        ($name: expr, $suffix: expr, $content: expr $(, $count: expr)*) => {{
+            let name = mk_name(cx, sp, ast::Ident::with_empty_ctxt($content));
+            let inner = cx.expr_call(sp, mk_token_path(cx, sp, $name), vec![
+                name $(, cx.expr_usize(sp, $count))*
+            ]);
             let suffix = match $suffix {
                 Some(name) => cx.expr_some(sp, mk_name(cx, sp, ast::Ident::with_empty_ctxt(name))),
                 None => cx.expr_none(sp)
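The reworked `mk_lit!` above accepts one mandatory content expression plus an optional trailing count through `$content: expr $(, $count: expr)*`, which lets the `StrRaw`/`ByteStrRaw` arms pass an extra argument while the other arms pass none. A standalone `macro_rules!` sketch of that argument shape (names and return values are illustrative only):

```rust
// One mandatory expression, then zero or more optional trailing counts.
macro_rules! lit_parts {
    ($name:expr $(, $count:expr)*) => {{
        let counts: Vec<usize> = vec![$($count),*];
        ($name.to_string(), counts)
    }};
}

fn main() {
    // No trailing count, as for the Byte/Char/Integer/Float/Str_ arms.
    assert_eq!(lit_parts!("Byte"), ("Byte".to_string(), vec![]));
    // One trailing count, as for the raw-string arms.
    assert_eq!(lit_parts!("StrRaw", 3), ("StrRaw".to_string(), vec![3]));
}
```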
@@ -621,7 +624,8 @@ macro_rules! mk_lit {
             cx.expr_call(sp, mk_token_path(cx, sp, "Literal"), vec![inner, suffix])
         }}
     }
-    match *tok {
+
+    let name = match *tok {
         token::BinOp(binop) => {
             return cx.expr_call(sp, mk_token_path(cx, sp, "BinOp"), vec![mk_binop(cx, sp, binop)]);
         }
@@ -639,34 +643,14 @@ macro_rules! mk_lit {
                                 vec![mk_delim(cx, sp, delim)]);
         }
 
-        token::Literal(token::Byte(i), suf) => {
-            let e_byte = mk_name(cx, sp, ast::Ident::with_empty_ctxt(i));
-            return mk_lit!("Byte", suf, e_byte);
-        }
-
-        token::Literal(token::Char(i), suf) => {
-            let e_char = mk_name(cx, sp, ast::Ident::with_empty_ctxt(i));
-            return mk_lit!("Char", suf, e_char);
-        }
-
-        token::Literal(token::Integer(i), suf) => {
-            let e_int = mk_name(cx, sp, ast::Ident::with_empty_ctxt(i));
-            return mk_lit!("Integer", suf, e_int);
-        }
-
-        token::Literal(token::Float(fident), suf) => {
-            let e_fident = mk_name(cx, sp, ast::Ident::with_empty_ctxt(fident));
-            return mk_lit!("Float", suf, e_fident);
-        }
-
-        token::Literal(token::Str_(ident), suf) => {
-            return mk_lit!("Str_", suf, mk_name(cx, sp, ast::Ident::with_empty_ctxt(ident)))
-        }
-
-        token::Literal(token::StrRaw(ident, n), suf) => {
-            return mk_lit!("StrRaw", suf, mk_name(cx, sp, ast::Ident::with_empty_ctxt(ident)),
-                           cx.expr_usize(sp, n))
-        }
+        token::Literal(token::Byte(i), suf) => return mk_lit!("Byte", suf, i),
+        token::Literal(token::Char(i), suf) => return mk_lit!("Char", suf, i),
+        token::Literal(token::Integer(i), suf) => return mk_lit!("Integer", suf, i),
+        token::Literal(token::Float(i), suf) => return mk_lit!("Float", suf, i),
+        token::Literal(token::Str_(i), suf) => return mk_lit!("Str_", suf, i),
+        token::Literal(token::StrRaw(i, n), suf) => return mk_lit!("StrRaw", suf, i, n),
+        token::Literal(token::ByteStr(i), suf) => return mk_lit!("ByteStr", suf, i),
+        token::Literal(token::ByteStrRaw(i, n), suf) => return mk_lit!("ByteStrRaw", suf, i, n),
 
         token::Ident(ident) => {
             return cx.expr_call(sp,
@@ -688,10 +672,6 @@ macro_rules! mk_lit {
 
         token::Interpolated(_) => panic!("quote! with interpolated token"),
 
-        _ => ()
-    }
-
-    let name = match *tok {
         token::Eq           => "Eq",
         token::Lt           => "Lt",
         token::Le           => "Le",
@@ -706,6 +686,7 @@ macro_rules! mk_lit {
         token::At           => "At",
         token::Dot          => "Dot",
         token::DotDot       => "DotDot",
+        token::DotDotDot    => "DotDotDot",
         token::Comma        => "Comma",
         token::Semi         => "Semi",
         token::Colon        => "Colon",
@@ -718,7 +699,10 @@ macro_rules! mk_lit {
         token::Question     => "Question",
         token::Underscore   => "Underscore",
         token::Eof          => "Eof",
-        _                   => panic!("unhandled token in quote!"),
+
+        token::Whitespace | token::SubstNt(_) | token::Comment | token::Shebang(_) => {
+            panic!("unhandled token in quote!");
+        }
     };
     mk_token_path(cx, sp, name)
 }
index 22a5776315a0cfe6cdf3c1e72e7de81ab0ef4b92..3cdd3a4b2c31d8a313c863cde1311e5b1a5c8a17 100644 (file)
@@ -35,7 +35,7 @@ pub fn expand_line(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
                    -> Box<base::MacResult+'static> {
     base::check_zero_tts(cx, sp, tts, "line!");
 
-    let topmost = cx.expansion_cause();
+    let topmost = cx.expansion_cause().unwrap_or(sp);
     let loc = cx.codemap().lookup_char_pos(topmost.lo);
 
     base::MacEager::expr(cx.expr_u32(topmost, loc.line as u32))
@@ -46,7 +46,7 @@ pub fn expand_column(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
                   -> Box<base::MacResult+'static> {
     base::check_zero_tts(cx, sp, tts, "column!");
 
-    let topmost = cx.expansion_cause();
+    let topmost = cx.expansion_cause().unwrap_or(sp);
     let loc = cx.codemap().lookup_char_pos(topmost.lo);
 
     base::MacEager::expr(cx.expr_u32(topmost, loc.col.to_usize() as u32))
@@ -59,7 +59,7 @@ pub fn expand_file(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
                    -> Box<base::MacResult+'static> {
     base::check_zero_tts(cx, sp, tts, "file!");
 
-    let topmost = cx.expansion_cause();
+    let topmost = cx.expansion_cause().unwrap_or(sp);
     let loc = cx.codemap().lookup_char_pos(topmost.lo);
     base::MacEager::expr(cx.expr_str(topmost, Symbol::intern(&loc.file.name)))
 }
@@ -150,7 +150,7 @@ pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenT
             cx.span_err(sp,
                         &format!("{} wasn't a utf-8 file",
                                 file.display()));
-            return DummyResult::expr(sp);
+            DummyResult::expr(sp)
         }
     }
 }
@@ -167,7 +167,7 @@ pub fn expand_include_bytes(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::Toke
         Err(e) => {
             cx.span_err(sp,
                         &format!("couldn't read {}: {}", file.display(), e));
-            return DummyResult::expr(sp);
+            DummyResult::expr(sp)
         }
         Ok(..) => {
             // Add this input file to the code map to make it available as
index eb0b7c29f8d9ad8d74aa15a283c7b12f5ea1c608..bf66aa0f00bed42409ffbdc7ffe8cc3341d2a787 100644 (file)
 //! repetitions indicated by Kleene stars. It only advances or calls out to the
 //! real Rust parser when no `cur_eis` items remain
 //!
-//! Example: Start parsing `a a a a b` against [· a $( a )* a b].
+//! Example:
 //!
-//! Remaining input: `a a a a b`
+//! ```text, ignore
+//! Start parsing a a a a b against [· a $( a )* a b].
+//!
+//! Remaining input: a a a a b
 //! next_eis: [· a $( a )* a b]
 //!
-//! - - - Advance over an `a`. - - -
+//! - - - Advance over an a. - - -
 //!
-//! Remaining input: `a a a b`
+//! Remaining input: a a a b
 //! cur: [a · $( a )* a b]
 //! Descend/Skip (first item).
 //! next: [a $( · a )* a b]  [a $( a )* · a b].
 //!
-//! - - - Advance over an `a`. - - -
+//! - - - Advance over an a. - - -
 //!
-//! Remaining input: `a a b`
+//! Remaining input: a a b
 //! cur: [a $( a · )* a b]  next: [a $( a )* a · b]
 //! Finish/Repeat (first item)
 //! next: [a $( a )* · a b]  [a $( · a )* a b]  [a $( a )* a · b]
 //!
-//! - - - Advance over an `a`. - - - (this looks exactly like the last step)
+//! - - - Advance over an a. - - - (this looks exactly like the last step)
 //!
-//! Remaining input: `a b`
+//! Remaining input: a b
 //! cur: [a $( a · )* a b]  next: [a $( a )* a · b]
 //! Finish/Repeat (first item)
 //! next: [a $( a )* · a b]  [a $( · a )* a b]  [a $( a )* a · b]
 //!
-//! - - - Advance over an `a`. - - - (this looks exactly like the last step)
+//! - - - Advance over an a. - - - (this looks exactly like the last step)
 //!
-//! Remaining input: `b`
+//! Remaining input: b
 //! cur: [a $( a · )* a b]  next: [a $( a )* a · b]
 //! Finish/Repeat (first item)
 //! next: [a $( a )* · a b]  [a $( · a )* a b]
 //!
-//! - - - Advance over a `b`. - - -
+//! - - - Advance over a b. - - -
 //!
-//! Remaining input: ``
+//! Remaining input: ''
 //! eof: [a $( a )* a b ·]
+//! ```
 
 pub use self::NamedMatch::*;
 pub use self::ParseResult::*;
@@ -178,20 +182,20 @@ fn initial_matcher_pos(ms: Vec<TokenTree>, lo: BytePos) -> Box<MatcherPos> {
     })
 }
 
-/// NamedMatch is a pattern-match result for a single token::MATCH_NONTERMINAL:
+/// `NamedMatch` is a pattern-match result for a single `token::MATCH_NONTERMINAL`:
 /// so it is associated with a single ident in a parse, and all
-/// `MatchedNonterminal`s in the NamedMatch have the same nonterminal type
-/// (expr, item, etc). Each leaf in a single NamedMatch corresponds to a
-/// single token::MATCH_NONTERMINAL in the TokenTree that produced it.
+/// `MatchedNonterminal`s in the `NamedMatch` have the same nonterminal type
+/// (expr, item, etc). Each leaf in a single `NamedMatch` corresponds to a
+/// single `token::MATCH_NONTERMINAL` in the `TokenTree` that produced it.
 ///
-/// The in-memory structure of a particular NamedMatch represents the match
+/// The in-memory structure of a particular `NamedMatch` represents the match
 /// that occurred when a particular subset of a matcher was applied to a
 /// particular token tree.
 ///
-/// The width of each MatchedSeq in the NamedMatch, and the identity of the
-/// `MatchedNonterminal`s, will depend on the token tree it was applied to:
-/// each MatchedSeq corresponds to a single TTSeq in the originating
-/// token tree. The depth of the NamedMatch structure will therefore depend
+/// The width of each `MatchedSeq` in the `NamedMatch`, and the identity of
+/// the `MatchedNonterminal`s, will depend on the token tree it was applied
+/// to: each `MatchedSeq` corresponds to a single `TTSeq` in the originating
+/// token tree. The depth of the `NamedMatch` structure will therefore depend
 /// only on the nesting depth of `ast::TTSeq`s in the originating
 /// token tree it was derived from.
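As a rough standalone model of the structure this comment describes (heavily simplified: the real `NamedMatch` stores actual `Nonterminal` values and span information rather than plain strings), the tree shape and the "depth follows sequence nesting" property look like this:

```rust
use std::rc::Rc;

// Simplified stand-in for NamedMatch: leaves are matched nonterminals,
// interior nodes are matched $(...) sequences.
enum NamedMatch {
    MatchedSeq(Vec<Rc<NamedMatch>>),
    MatchedNonterminal(String),
}

// The depth of the tree mirrors the nesting depth of sequences in the matcher.
fn depth(m: &NamedMatch) -> usize {
    match *m {
        NamedMatch::MatchedNonterminal(_) => 0,
        NamedMatch::MatchedSeq(ref seq) =>
            1 + seq.iter().map(|sub| depth(sub)).max().unwrap_or(0),
    }
}

fn main() {
    // Matching `$($e:expr),*` against `1, 2` conceptually yields one MatchedSeq
    // whose two leaves are the matched expressions.
    let m = NamedMatch::MatchedSeq(vec![
        Rc::new(NamedMatch::MatchedNonterminal("1".to_string())),
        Rc::new(NamedMatch::MatchedNonterminal("2".to_string())),
    ]);
    assert_eq!(depth(&m), 1);
}
```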
 
@@ -267,11 +271,12 @@ pub fn parse_failure_msg(tok: Token) -> String {
 
 /// Perform a token equality check, ignoring syntax context (that is, an unhygienic comparison)
 fn token_name_eq(t1 : &Token, t2 : &Token) -> bool {
-    match (t1,t2) {
-        (&token::Ident(id1),&token::Ident(id2))
-        | (&token::Lifetime(id1),&token::Lifetime(id2)) =>
-            id1.name == id2.name,
-        _ => *t1 == *t2
+    if let (Some(id1), Some(id2)) = (t1.ident(), t2.ident()) {
+        id1.name == id2.name
+    } else if let (&token::Lifetime(id1), &token::Lifetime(id2)) = (t1, t2) {
+        id1.name == id2.name
+    } else {
+        *t1 == *t2
     }
 }
 
@@ -334,7 +339,7 @@ fn inner_parse_loop(sess: &ParseSess,
                 // Check if we need a separator
                 if idx == len && ei.sep.is_some() {
                     // We have a separator, and it is the current token.
-                    if ei.sep.as_ref().map(|ref sep| token_name_eq(&token, sep)).unwrap_or(false) {
+                    if ei.sep.as_ref().map(|sep| token_name_eq(token, sep)).unwrap_or(false) {
                         ei.idx += 1;
                         next_eis.push(ei);
                     }
@@ -401,7 +406,7 @@ fn inner_parse_loop(sess: &ParseSess,
                     cur_eis.push(ei);
                 }
                 TokenTree::Token(_, ref t) => {
-                    if token_name_eq(t, &token) {
+                    if token_name_eq(t, token) {
                         ei.idx += 1;
                         next_eis.push(ei);
                     }
@@ -485,11 +490,8 @@ pub fn parse(sess: &ParseSess, tts: TokenStream, ms: &[TokenTree], directory: Op
 }
 
 fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal {
-    match name {
-        "tt" => {
-            return token::NtTT(p.parse_token_tree());
-        }
-        _ => {}
+    if name == "tt" {
+        return token::NtTT(p.parse_token_tree());
     }
     // check at the beginning and the parser checks after each bump
     p.process_potential_macro_variable();
index f959ccc989e2e9e9d55643966f0fe4648b8bd17b..a208f530602a5f0b05677e27c261e29971871e78 100644 (file)
@@ -94,7 +94,7 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt,
                           -> Box<MacResult+'cx> {
     if cx.trace_macros() {
         let sp = sp.macro_backtrace().last().map(|trace| trace.call_site).unwrap_or(sp);
-        let mut values: &mut Vec<String> = cx.expansions.entry(sp).or_insert(vec![]);
+        let mut values: &mut Vec<String> = cx.expansions.entry(sp).or_insert_with(Vec::new);
         values.push(format!("expands to `{}! {{ {} }}`", name, arg));
     }
 
@@ -206,7 +206,7 @@ pub fn compile(sess: &ParseSess, features: &RefCell<Features>, def: &ast::Item)
     let mut valid = true;
 
     // Extract the arguments:
-    let lhses = match **argument_map.get(&lhs_nm).unwrap() {
+    let lhses = match *argument_map[&lhs_nm] {
         MatchedSeq(ref s, _) => {
             s.iter().map(|m| {
                 if let MatchedNonterminal(ref nt) = **m {
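The `**argument_map.get(&lhs_nm).unwrap()` → `*argument_map[&lhs_nm]` change relies on `HashMap`'s `Index` implementation panicking when the key is absent, so the behaviour on a missing key is the same as the explicit `unwrap`. A tiny sketch with an illustrative key:

```rust
use std::collections::HashMap;

fn main() {
    let mut args = HashMap::new();
    args.insert("lhs", vec![1, 2, 3]);

    // Indexing a HashMap panics on a missing key, exactly like .get(..).unwrap().
    assert_eq!(args["lhs"], *args.get("lhs").unwrap());
}
```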
@@ -222,7 +222,7 @@ pub fn compile(sess: &ParseSess, features: &RefCell<Features>, def: &ast::Item)
         _ => sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs")
     };
 
-    let rhses = match **argument_map.get(&rhs_nm).unwrap() {
+    let rhses = match *argument_map[&rhs_nm] {
         MatchedSeq(ref s, _) => {
             s.iter().map(|m| {
                 if let MatchedNonterminal(ref nt) = **m {
@@ -252,7 +252,9 @@ pub fn compile(sess: &ParseSess, features: &RefCell<Features>, def: &ast::Item)
         valid: valid,
     });
 
-    NormalTT(exp, Some(def.span), attr::contains_name(&def.attrs, "allow_internal_unstable"))
+    NormalTT(exp,
+             Some((def.id, def.span)),
+             attr::contains_name(&def.attrs, "allow_internal_unstable"))
 }
 
 fn check_lhs_nt_follows(sess: &ParseSess,
@@ -260,13 +262,12 @@ fn check_lhs_nt_follows(sess: &ParseSess,
                         lhs: &quoted::TokenTree) -> bool {
     // lhs is going to be like TokenTree::Delimited(...), where the
     // entire lhs is those tts. Or, it can be a "bare sequence", not wrapped in parens.
-    match lhs {
-        &quoted::TokenTree::Delimited(_, ref tts) => check_matcher(sess, features, &tts.tts),
-        _ => {
-            let msg = "invalid macro matcher; matchers must be contained in balanced delimiters";
-            sess.span_diagnostic.span_err(lhs.span(), msg);
-            false
-        }
+    if let quoted::TokenTree::Delimited(_, ref tts) = *lhs {
+        check_matcher(sess, features, &tts.tts)
+    } else {
+        let msg = "invalid macro matcher; matchers must be contained in balanced delimiters";
+        sess.span_diagnostic.span_err(lhs.span(), msg);
+        false
     }
     // we don't abort on errors on rejection, the driver will do that for us
     // after parsing/expansion. we can report every error in every macro this way.
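`check_lhs_nt_follows` above trades a two-arm `match` with a catch-all `_` arm for `if let ... else`; the two forms are interchangeable when only one pattern is interesting. A tiny illustrative sketch on a made-up enum:

```rust
enum Matcher {
    Delimited(Vec<String>),
    Bare(String),
}

fn is_balanced(m: &Matcher) -> bool {
    // Equivalent to `match *m { Matcher::Delimited(ref tts) => ..., _ => false }`.
    if let Matcher::Delimited(ref tts) = *m {
        !tts.is_empty()
    } else {
        false
    }
}

fn main() {
    assert!(is_balanced(&Matcher::Delimited(vec!["a".to_string()])));
    assert!(!is_balanced(&Matcher::Bare("a".to_string())));
}
```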
@@ -283,17 +284,15 @@ fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[quoted::TokenTree]) -> bool {
                 return false;
             },
             TokenTree::Sequence(span, ref seq) => {
-                if seq.separator.is_none() {
-                    if seq.tts.iter().all(|seq_tt| {
-                        match *seq_tt {
-                            TokenTree::Sequence(_, ref sub_seq) =>
-                                sub_seq.op == quoted::KleeneOp::ZeroOrMore,
-                            _ => false,
-                        }
-                    }) {
-                        sess.span_diagnostic.span_err(span, "repetition matches empty token tree");
-                        return false;
+                if seq.separator.is_none() && seq.tts.iter().all(|seq_tt| {
+                    match *seq_tt {
+                        TokenTree::Sequence(_, ref sub_seq) =>
+                            sub_seq.op == quoted::KleeneOp::ZeroOrMore,
+                        _ => false,
                     }
+                }) {
+                    sess.span_diagnostic.span_err(span, "repetition matches empty token tree");
+                    return false;
                 }
                 if !check_lhs_no_empty_seq(sess, &seq.tts) {
                     return false;
@@ -407,7 +406,7 @@ fn build_recur(sets: &mut FirstSets, tts: &[TokenTree]) -> TokenSet {
                 }
             }
 
-            return first;
+            first
         }
     }
 
@@ -469,7 +468,7 @@ fn first(&self, tts: &[quoted::TokenTree]) -> TokenSet {
         // we only exit the loop if `tts` was empty or if every
         // element of `tts` matches the empty sequence.
         assert!(first.maybe_empty);
-        return first;
+        first
     }
 }
 
@@ -579,7 +578,7 @@ fn check_matcher_core(sess: &ParseSess,
         let build_suffix_first = || {
             let mut s = first_sets.first(suffix);
             if s.maybe_empty { s.add_all(follow); }
-            return s;
+            s
         };
 
         // (we build `suffix_first` on demand below; you can tell
@@ -861,6 +860,7 @@ fn quoted_tt_to_string(tt: &quoted::TokenTree) -> String {
     match *tt {
         quoted::TokenTree::Token(_, ref tok) => ::print::pprust::token_to_string(tok),
         quoted::TokenTree::MetaVarDecl(_, name, kind) => format!("${}:{}", name, kind),
-        _ => panic!("unexpected quoted::TokenTree::{Sequence or Delimited} in follow set checker"),
+        _ => panic!("unexpected quoted::TokenTree::{{Sequence or Delimited}} \
+                     in follow set checker"),
     }
 }
index d216effbd450812502d4a1c675cb933474f6ab9c..fa65e9501c2bb793aa34c64956aaff717bb4227d 100644 (file)
@@ -96,6 +96,17 @@ pub fn len(&self) -> usize {
         }
     }
 
+    pub fn is_empty(&self) -> bool {
+        match *self {
+            TokenTree::Delimited(_, ref delimed) => match delimed.delim {
+                token::NoDelim => delimed.tts.is_empty(),
+                _ => false,
+            },
+            TokenTree::Sequence(_, ref seq) => seq.tts.is_empty(),
+            _ => true,
+        }
+    }
+
     pub fn get_tt(&self, index: usize) -> TokenTree {
         match (self, index) {
             (&TokenTree::Delimited(_, ref delimed), _) if delimed.delim == token::NoDelim => {
@@ -144,9 +155,9 @@ pub fn parse(input: tokenstream::TokenStream, expect_matchers: bool, sess: &Pars
                             }
                             _ => end_sp,
                         },
-                        tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span),
+                        tree => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span),
                     },
-                    tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(start_sp),
+                    tree => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(start_sp),
                 };
                 sess.missing_fragment_specifiers.borrow_mut().insert(span);
                 result.push(TokenTree::MetaVarDecl(span, ident, keywords::Invalid.ident()));
@@ -228,10 +239,10 @@ fn kleene_op(token: &token::Token) -> Option<KleeneOp> {
                     Some(op) => return (Some(tok), op),
                     None => span,
                 },
-                tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span),
+                tree => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span),
             }
         },
-        tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span),
+        tree => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span),
     };
 
     sess.span_diagnostic.span_err(span, "expected `*` or `+`");
index 947089b0b9ac4b977686c26603f47b06664545f9..2a435bdea107f2d11d2bf22a0f20005dc0f589aa 100644 (file)
@@ -121,20 +121,20 @@ pub fn transcribe(sp_diag: &Handler,
                                          &repeats) {
                     LockstepIterSize::Unconstrained => {
                         panic!(sp_diag.span_fatal(
-                            sp.clone(), /* blame macro writer */
+                            sp, /* blame macro writer */
                             "attempted to repeat an expression \
                              containing no syntax \
                              variables matched as repeating at this depth"));
                     }
                     LockstepIterSize::Contradiction(ref msg) => {
                         // FIXME #2887 blame macro invoker instead
-                        panic!(sp_diag.span_fatal(sp.clone(), &msg[..]));
+                        panic!(sp_diag.span_fatal(sp, &msg[..]));
                     }
                     LockstepIterSize::Constraint(len, _) => {
                         if len == 0 {
                             if seq.op == quoted::KleeneOp::OneOrMore {
                                 // FIXME #2887 blame invoker
-                                panic!(sp_diag.span_fatal(sp.clone(),
+                                panic!(sp_diag.span_fatal(sp,
                                                           "this must repeat at least once"));
                             }
                         } else {
index b6a2c983fd4d7fb687abc39bea3ea65ddb14a844..09090ab87313087ac393082c21f6f95a0c335a83 100644 (file)
@@ -472,7 +472,7 @@ pub enum Stability {
 impl ::std::fmt::Debug for AttributeGate {
     fn fmt(&self, fmt: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
         match *self {
-            Gated(ref stab, ref name, ref expl, _) =>
+            Gated(ref stab, name, expl, _) =>
                 write!(fmt, "Gated({:?}, {}, {})", stab, name, expl),
             Ungated => write!(fmt, "Ungated")
         }
@@ -816,7 +816,7 @@ pub fn is_builtin_attr(attr: &ast::Attribute) -> bool {
 ];
 
 // cfg(...)'s that are feature gated
-const GATED_CFGS: &'static [(&'static str, &'static str, fn(&Features) -> bool)] = &[
+const GATED_CFGS: &[(&str, &str, fn(&Features) -> bool)] = &[
     // (name in cfg, feature, function to check if the feature is enabled)
     ("target_feature", "cfg_target_feature", cfg_fn!(cfg_target_feature)),
     ("target_vendor", "cfg_target_vendor", cfg_fn!(cfg_target_vendor)),
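The `&'static [...]` → `&[...]` type in the `GATED_CFGS` declaration uses the rule, stabilised in Rust 1.17, that elided reference lifetimes in `const` and `static` items default to `'static`. A minimal sketch:

```rust
// Equivalent declarations: in const and static items, an elided reference
// lifetime defaults to 'static.
const EXPLICIT: &'static [&'static str] = &["target_feature", "target_vendor"];
const ELIDED: &[&str] = &["target_feature", "target_vendor"];

fn main() {
    assert_eq!(EXPLICIT, ELIDED);
}
```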
@@ -881,7 +881,7 @@ fn check_attribute(&self, attr: &ast::Attribute, is_macro: bool) {
         let name = unwrap_or!(attr.name(), return).as_str();
         for &(n, ty, ref gateage) in BUILTIN_ATTRIBUTES {
             if name == n {
-                if let &Gated(_, ref name, ref desc, ref has_feature) = gateage {
+                if let Gated(_, name, desc, ref has_feature) = *gateage {
                     gate_feature_fn!(self, has_feature, attr.span, name, desc);
                 }
                 debug!("check_attribute: {:?} is builtin, {:?}, {:?}", attr.path, ty, gateage);
@@ -1098,7 +1098,7 @@ fn contains_novel_literal(item: &ast::MetaItem) -> bool {
         NameValue(ref lit) => !lit.node.is_str(),
         List(ref list) => list.iter().any(|li| {
             match li.node {
-                MetaItem(ref mi) => contains_novel_literal(&mi),
+                MetaItem(ref mi) => contains_novel_literal(mi),
                 Literal(_) => true,
             }
         }),
@@ -1120,7 +1120,7 @@ fn visit_attribute(&mut self, attr: &ast::Attribute) {
             return
         }
 
-        let meta = panictry!(attr.parse_meta(&self.context.parse_sess));
+        let meta = panictry!(attr.parse_meta(self.context.parse_sess));
         if contains_novel_literal(&meta) {
             gate_feature_post!(&self, attr_literals, attr.span,
                                "non-string literals in attributes, or string \
@@ -1216,14 +1216,11 @@ fn visit_item(&mut self, i: &'a ast::Item) {
             }
 
             ast::ItemKind::Impl(_, polarity, defaultness, _, _, _, _) => {
-                match polarity {
-                    ast::ImplPolarity::Negative => {
-                        gate_feature_post!(&self, optin_builtin_traits,
-                                           i.span,
-                                           "negative trait bounds are not yet fully implemented; \
-                                            use marker types for now");
-                    },
-                    _ => {}
+                if polarity == ast::ImplPolarity::Negative {
+                    gate_feature_post!(&self, optin_builtin_traits,
+                                       i.span,
+                                       "negative trait bounds are not yet fully implemented; \
+                                        use marker types for now");
                 }
 
                 if let ast::Defaultness::Default = defaultness {
@@ -1272,11 +1269,9 @@ fn visit_ty(&mut self, ty: &'a ast::Ty) {
 
     fn visit_fn_ret_ty(&mut self, ret_ty: &'a ast::FunctionRetTy) {
         if let ast::FunctionRetTy::Ty(ref output_ty) = *ret_ty {
-            match output_ty.node {
-                ast::TyKind::Never => return,
-                _ => (),
-            };
-            self.visit_ty(output_ty)
+            if output_ty.node != ast::TyKind::Never {
+                self.visit_ty(output_ty)
+            }
         }
     }
 
@@ -1373,17 +1368,14 @@ fn visit_fn(&mut self,
                 span: Span,
                 _node_id: NodeId) {
         // check for const fn declarations
-        match fn_kind {
-            FnKind::ItemFn(_, _, _, Spanned { node: ast::Constness::Const, .. }, _, _, _) => {
-                gate_feature_post!(&self, const_fn, span, "const fn is unstable");
-            }
-            _ => {
-                // stability of const fn methods are covered in
-                // visit_trait_item and visit_impl_item below; this is
-                // because default methods don't pass through this
-                // point.
-            }
+        if let FnKind::ItemFn(_, _, _, Spanned { node: ast::Constness::Const, .. }, _, _, _) =
+            fn_kind {
+            gate_feature_post!(&self, const_fn, span, "const fn is unstable");
         }
+        // stability of const fn methods are covered in
+        // visit_trait_item and visit_impl_item below; this is
+        // because default methods don't pass through this
+        // point.
 
         match fn_kind {
             FnKind::ItemFn(_, _, _, _, abi, _, _) |
index 06335584c96108571e0136a1ba84a052270820e9..f37dcfdde8985f7594b3381e8e7cf4e57a2a889f 100644 (file)
@@ -337,7 +337,7 @@ fn from_span(span: Span, je: &JsonEmitter) -> Vec<DiagnosticSpanLine> {
                       })
                      .collect()
              })
-            .unwrap_or(vec![])
+            .unwrap_or_else(|_| vec![])
     }
 }
 
index 92cec462ffb7cefbdd623490c125dab5a4e5f4bf..082930777e598cfe4a9392e1dd7e18106ff648fa 100644 (file)
@@ -62,7 +62,7 @@ pub fn parse_outer_attributes(&mut self) -> PResult<'a, Vec<ast::Attribute>> {
                 _ => break,
             }
         }
-        return Ok(attrs);
+        Ok(attrs)
     }
 
     /// Matches `attribute = # ! [ meta_item ]`
@@ -182,7 +182,7 @@ pub fn parse_inner_attributes(&mut self) -> PResult<'a, Vec<ast::Attribute>> {
                     }
 
                     let attr = self.parse_attribute(true)?;
-                    assert!(attr.style == ast::AttrStyle::Inner);
+                    assert_eq!(attr.style, ast::AttrStyle::Inner);
                     attrs.push(attr);
                 }
                 token::DocComment(s) => {
index 4fe4ec7e4c0ed3f785fb498ded087152b4b67892..0c6f09ba7666cb0ed6b5896194c75794f1f2d5d1 100644 (file)
@@ -43,14 +43,14 @@ pub fn expr_is_simple_block(e: &ast::Expr) -> bool {
 }
 
 /// this statement requires a semicolon after it.
-/// note that in one case (stmt_semi), we've already
+/// note that in one case (`stmt_semi`), we've already
 /// seen the semicolon, and thus don't need another.
 pub fn stmt_ends_with_semi(stmt: &ast::StmtKind) -> bool {
     match *stmt {
         ast::StmtKind::Local(_) => true,
-        ast::StmtKind::Item(_) => false,
         ast::StmtKind::Expr(ref e) => expr_requires_semi_to_be_stmt(e),
-        ast::StmtKind::Semi(..) => false,
+        ast::StmtKind::Item(_) |
+        ast::StmtKind::Semi(..) |
         ast::StmtKind::Mac(..) => false,
     }
 }
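The `stmt_ends_with_semi` change above folds three arms that all return `false` into a single arm with `|` patterns, which keeps the match exhaustive while removing the duplication. A standalone sketch on an illustrative enum:

```rust
enum Stmt { Local, Item, Expr, Semi, Mac }

fn ends_with_semi(stmt: &Stmt) -> bool {
    match *stmt {
        Stmt::Local => true,
        Stmt::Expr => true,
        // One `|` pattern replaces three identical `=> false` arms.
        Stmt::Item | Stmt::Semi | Stmt::Mac => false,
    }
}

fn main() {
    assert!(ends_with_semi(&Stmt::Local));
    assert!(ends_with_semi(&Stmt::Expr));
    assert!(!ends_with_semi(&Stmt::Item));
    assert!(!ends_with_semi(&Stmt::Semi));
    assert!(!ends_with_semi(&Stmt::Mac));
}
```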
index b57708f9193a39c4daed2ba0be23ce010c1ee45f..fe931f7cf6a645f4025baea781dc4e5a89ac2460 100644 (file)
@@ -12,7 +12,7 @@
 
 use parse::token;
 
-/// SeqSep : a sequence separator (token)
+/// `SeqSep` : a sequence separator (token)
 /// and whether a trailing separator is allowed.
 pub struct SeqSep {
     pub sep: Option<token::Token>,
index 7ac322b144c7ea46c9641fbb719f87909e0f6998..8b545d3b909e823c472a1224219a723ce53731cf 100644 (file)
@@ -77,7 +77,7 @@ fn vertical_trim(lines: Vec<String>) -> Vec<String> {
         while j > i && lines[j - 1].trim().is_empty() {
             j -= 1;
         }
-        lines[i..j].iter().cloned().collect()
+        lines[i..j].to_vec()
     }
 
     /// remove a "[ \t]*\*" block from each line, if possible
index a83b19c7334e7b70b33dac1ad6db07d5b3c3ae7d..0bcd457851890c6db66ecbae0d988c990cbf3865 100644 (file)
@@ -144,7 +144,7 @@ pub fn peek(&self) -> TokenAndSpan {
 
 impl<'a> StringReader<'a> {
     /// For comments.rs, which hackily pokes into next_pos and ch
-    pub fn new_raw<'b>(sess: &'a ParseSess, filemap: Rc<syntax_pos::FileMap>) -> Self {
+    pub fn new_raw(sess: &'a ParseSess, filemap: Rc<syntax_pos::FileMap>) -> Self {
         let mut sr = StringReader::new_raw_internal(sess, filemap);
         sr.bump();
         sr
@@ -180,7 +180,7 @@ fn new_raw_internal(sess: &'a ParseSess, filemap: Rc<syntax_pos::FileMap>) -> Se
 
     pub fn new(sess: &'a ParseSess, filemap: Rc<syntax_pos::FileMap>) -> Self {
         let mut sr = StringReader::new_raw(sess, filemap);
-        if let Err(_) = sr.advance_token() {
+        if sr.advance_token().is_err() {
             sr.emit_fatal_errors();
             panic!(FatalError);
         }
@@ -205,7 +205,7 @@ pub fn retokenize(sess: &'a ParseSess, mut span: Span) -> Self {
 
         sr.bump();
 
-        if let Err(_) = sr.advance_token() {
+        if sr.advance_token().is_err() {
             sr.emit_fatal_errors();
             panic!(FatalError);
         }
@@ -525,7 +525,7 @@ fn scan_comment(&mut self) -> Option<TokenAndSpan> {
                         self.bump();
                     }
 
-                    return if doc_comment {
+                    if doc_comment {
                         self.with_str_from(start_bpos, |string| {
                             // comments with only more "/"s are not doc comments
                             let tok = if is_doc_comment(string) {
@@ -544,7 +544,7 @@ fn scan_comment(&mut self) -> Option<TokenAndSpan> {
                             tok: token::Comment,
                             sp: mk_sp(start_bpos, self.pos),
                         })
-                    };
+                    }
                 }
                 Some('*') => {
                     self.bump();
@@ -764,7 +764,7 @@ fn scan_number(&mut self, c: char) -> token::Lit {
             }
             let pos = self.pos;
             self.check_float_base(start_bpos, pos, base);
-            return token::Float(self.name_from(start_bpos));
+            token::Float(self.name_from(start_bpos))
         } else {
             // it might be a float if it has an exponent
             if self.ch_is('e') || self.ch_is('E') {
@@ -774,7 +774,7 @@ fn scan_number(&mut self, c: char) -> token::Lit {
                 return token::Float(self.name_from(start_bpos));
             }
             // but we certainly have an integer!
-            return token::Integer(self.name_from(start_bpos));
+            token::Integer(self.name_from(start_bpos))
         }
     }
 
@@ -1051,9 +1051,9 @@ fn binop(&mut self, op: token::BinOpToken) -> token::Token {
         self.bump();
         if self.ch_is('=') {
             self.bump();
-            return token::BinOpEq(op);
+            token::BinOpEq(op)
         } else {
-            return token::BinOp(op);
+            token::BinOp(op)
         }
     }
 
@@ -1100,15 +1100,15 @@ fn next_token_inner(&mut self) -> Result<token::Token, ()> {
             // One-byte tokens.
             ';' => {
                 self.bump();
-                return Ok(token::Semi);
+                Ok(token::Semi)
             }
             ',' => {
                 self.bump();
-                return Ok(token::Comma);
+                Ok(token::Comma)
             }
             '.' => {
                 self.bump();
-                return if self.ch_is('.') {
+                if self.ch_is('.') {
                     self.bump();
                     if self.ch_is('.') {
                         self.bump();
@@ -1118,61 +1118,61 @@ fn next_token_inner(&mut self) -> Result<token::Token, ()> {
                     }
                 } else {
                     Ok(token::Dot)
-                };
+                }
             }
             '(' => {
                 self.bump();
-                return Ok(token::OpenDelim(token::Paren));
+                Ok(token::OpenDelim(token::Paren))
             }
             ')' => {
                 self.bump();
-                return Ok(token::CloseDelim(token::Paren));
+                Ok(token::CloseDelim(token::Paren))
             }
             '{' => {
                 self.bump();
-                return Ok(token::OpenDelim(token::Brace));
+                Ok(token::OpenDelim(token::Brace))
             }
             '}' => {
                 self.bump();
-                return Ok(token::CloseDelim(token::Brace));
+                Ok(token::CloseDelim(token::Brace))
             }
             '[' => {
                 self.bump();
-                return Ok(token::OpenDelim(token::Bracket));
+                Ok(token::OpenDelim(token::Bracket))
             }
             ']' => {
                 self.bump();
-                return Ok(token::CloseDelim(token::Bracket));
+                Ok(token::CloseDelim(token::Bracket))
             }
             '@' => {
                 self.bump();
-                return Ok(token::At);
+                Ok(token::At)
             }
             '#' => {
                 self.bump();
-                return Ok(token::Pound);
+                Ok(token::Pound)
             }
             '~' => {
                 self.bump();
-                return Ok(token::Tilde);
+                Ok(token::Tilde)
             }
             '?' => {
                 self.bump();
-                return Ok(token::Question);
+                Ok(token::Question)
             }
             ':' => {
                 self.bump();
                 if self.ch_is(':') {
                     self.bump();
-                    return Ok(token::ModSep);
+                    Ok(token::ModSep)
                 } else {
-                    return Ok(token::Colon);
+                    Ok(token::Colon)
                 }
             }
 
             '$' => {
                 self.bump();
-                return Ok(token::Dollar);
+                Ok(token::Dollar)
             }
 
             // Multi-byte tokens.
@@ -1180,21 +1180,21 @@ fn next_token_inner(&mut self) -> Result<token::Token, ()> {
                 self.bump();
                 if self.ch_is('=') {
                     self.bump();
-                    return Ok(token::EqEq);
+                    Ok(token::EqEq)
                 } else if self.ch_is('>') {
                     self.bump();
-                    return Ok(token::FatArrow);
+                    Ok(token::FatArrow)
                 } else {
-                    return Ok(token::Eq);
+                    Ok(token::Eq)
                 }
             }
             '!' => {
                 self.bump();
                 if self.ch_is('=') {
                     self.bump();
-                    return Ok(token::Ne);
+                    Ok(token::Ne)
                 } else {
-                    return Ok(token::Not);
+                    Ok(token::Not)
                 }
             }
             '<' => {
@@ -1202,21 +1202,21 @@ fn next_token_inner(&mut self) -> Result<token::Token, ()> {
                 match self.ch.unwrap_or('\x00') {
                     '=' => {
                         self.bump();
-                        return Ok(token::Le);
+                        Ok(token::Le)
                     }
                     '<' => {
-                        return Ok(self.binop(token::Shl));
+                        Ok(self.binop(token::Shl))
                     }
                     '-' => {
                         self.bump();
                         match self.ch.unwrap_or('\x00') {
                             _ => {
-                                return Ok(token::LArrow);
+                                Ok(token::LArrow)
                             }
                         }
                     }
                     _ => {
-                        return Ok(token::Lt);
+                        Ok(token::Lt)
                     }
                 }
             }
@@ -1225,13 +1225,13 @@ fn next_token_inner(&mut self) -> Result<token::Token, ()> {
                 match self.ch.unwrap_or('\x00') {
                     '=' => {
                         self.bump();
-                        return Ok(token::Ge);
+                        Ok(token::Ge)
                     }
                     '>' => {
-                        return Ok(self.binop(token::Shr));
+                        Ok(self.binop(token::Shr))
                     }
                     _ => {
-                        return Ok(token::Gt);
+                        Ok(token::Gt)
                     }
                 }
             }
@@ -1301,7 +1301,7 @@ fn next_token_inner(&mut self) -> Result<token::Token, ()> {
                 };
                 self.bump(); // advance ch past token
                 let suffix = self.scan_optional_raw_name();
-                return Ok(token::Literal(token::Char(id), suffix));
+                Ok(token::Literal(token::Char(id), suffix))
             }
             'b' => {
                 self.bump();
@@ -1312,7 +1312,7 @@ fn next_token_inner(&mut self) -> Result<token::Token, ()> {
                     _ => unreachable!(),  // Should have been a token::Ident above.
                 };
                 let suffix = self.scan_optional_raw_name();
-                return Ok(token::Literal(lit, suffix));
+                Ok(token::Literal(lit, suffix))
             }
             '"' => {
                 let start_bpos = self.pos;
@@ -1343,7 +1343,7 @@ fn next_token_inner(&mut self) -> Result<token::Token, ()> {
                 };
                 self.bump();
                 let suffix = self.scan_optional_raw_name();
-                return Ok(token::Literal(token::Str_(id), suffix));
+                Ok(token::Literal(token::Str_(id), suffix))
             }
             'r' => {
                 let start_bpos = self.pos;
@@ -1414,24 +1414,24 @@ fn next_token_inner(&mut self) -> Result<token::Token, ()> {
                     Symbol::intern("??")
                 };
                 let suffix = self.scan_optional_raw_name();
-                return Ok(token::Literal(token::StrRaw(id, hash_count), suffix));
+                Ok(token::Literal(token::StrRaw(id, hash_count), suffix))
             }
             '-' => {
                 if self.nextch_is('>') {
                     self.bump();
                     self.bump();
-                    return Ok(token::RArrow);
+                    Ok(token::RArrow)
                 } else {
-                    return Ok(self.binop(token::Minus));
+                    Ok(self.binop(token::Minus))
                 }
             }
             '&' => {
                 if self.nextch_is('&') {
                     self.bump();
                     self.bump();
-                    return Ok(token::AndAnd);
+                    Ok(token::AndAnd)
                 } else {
-                    return Ok(self.binop(token::And));
+                    Ok(self.binop(token::And))
                 }
             }
             '|' => {
@@ -1439,27 +1439,27 @@ fn next_token_inner(&mut self) -> Result<token::Token, ()> {
                     Some('|') => {
                         self.bump();
                         self.bump();
-                        return Ok(token::OrOr);
+                        Ok(token::OrOr)
                     }
                     _ => {
-                        return Ok(self.binop(token::Or));
+                        Ok(self.binop(token::Or))
                     }
                 }
             }
             '+' => {
-                return Ok(self.binop(token::Plus));
+                Ok(self.binop(token::Plus))
             }
             '*' => {
-                return Ok(self.binop(token::Star));
+                Ok(self.binop(token::Star))
             }
             '/' => {
-                return Ok(self.binop(token::Slash));
+                Ok(self.binop(token::Slash))
             }
             '^' => {
-                return Ok(self.binop(token::Caret));
+                Ok(self.binop(token::Caret))
             }
             '%' => {
-                return Ok(self.binop(token::Percent));
+                Ok(self.binop(token::Percent))
             }
             c => {
                 let last_bpos = self.pos;
@@ -1468,7 +1468,7 @@ fn next_token_inner(&mut self) -> Result<token::Token, ()> {
                                                           bpos,
                                                           "unknown start of token",
                                                           c);
-                unicode_chars::check_for_substitution(&self, c, &mut err);
+                unicode_chars::check_for_substitution(self, c, &mut err);
                 self.fatal_errs.push(err);
                 Err(())
             }
@@ -1490,14 +1490,14 @@ fn read_to_eol(&mut self) -> String {
         if self.ch_is('\n') {
             self.bump();
         }
-        return val;
+        val
     }
 
     fn read_one_line_comment(&mut self) -> String {
         let val = self.read_to_eol();
         assert!((val.as_bytes()[0] == b'/' && val.as_bytes()[1] == b'/') ||
                 (val.as_bytes()[0] == b'#' && val.as_bytes()[1] == b'!'));
-        return val;
+        val
     }
 
     fn consume_non_eol_whitespace(&mut self) {
@@ -1541,7 +1541,7 @@ fn scan_byte(&mut self) -> token::Lit {
             Symbol::intern("?")
         };
         self.bump(); // advance ch past token
-        return token::Byte(id);
+        token::Byte(id)
     }
 
     fn scan_byte_escape(&mut self, delim: char, below_0x7f_only: bool) -> bool {
@@ -1574,7 +1574,7 @@ fn scan_byte_string(&mut self) -> token::Lit {
             Symbol::intern("??")
         };
         self.bump();
-        return token::ByteStr(id);
+        token::ByteStr(id)
     }
 
     fn scan_raw_byte_string(&mut self) -> token::Lit {
@@ -1627,8 +1627,8 @@ fn scan_raw_byte_string(&mut self) -> token::Lit {
             self.bump();
         }
         self.bump();
-        return token::ByteStrRaw(self.name_from_to(content_start_bpos, content_end_bpos),
-                                 hash_count);
+        token::ByteStrRaw(self.name_from_to(content_start_bpos, content_end_bpos),
+                                 hash_count)
     }
 }
 
@@ -1646,7 +1646,7 @@ fn in_range(c: Option<char>, lo: char, hi: char) -> bool {
 }
 
 fn is_dec_digit(c: Option<char>) -> bool {
-    return in_range(c, '0', '9');
+    in_range(c, '0', '9')
 }
 
 pub fn is_doc_comment(s: &str) -> bool {
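Most of the lexer changes above simply drop a trailing `return` (or a `return` wrapping an `if`/`else`) in favour of the tail expression, which is behaviour-preserving. A minimal sketch of the two equivalent forms, loosely modelled on the `binop` change:

```rust
// Explicit returns, as in the code being removed...
fn binop_explicit(has_eq: bool) -> &'static str {
    if has_eq {
        return "BinOpEq";
    } else {
        return "BinOp";
    }
}

// ...versus the tail-expression form the patch switches to.
fn binop_tail(has_eq: bool) -> &'static str {
    if has_eq { "BinOpEq" } else { "BinOp" }
}

fn main() {
    assert_eq!(binop_explicit(true), binop_tail(true));
    assert_eq!(binop_explicit(false), binop_tail(false));
}
```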
index fe3ca1cf2305c0341ae3fb50db3616cb0e581c1b..1eff819d755493f33f713b6281100591bab78413 100644 (file)
@@ -107,18 +107,18 @@ pub fn parse_crate_attrs_from_file<'a>(input: &Path, sess: &'a ParseSess)
     parser.parse_inner_attributes()
 }
 
-pub fn parse_crate_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
-                                       -> PResult<'a, ast::Crate> {
+pub fn parse_crate_from_source_str(name: String, source: String, sess: &ParseSess)
+                                       -> PResult<ast::Crate> {
     new_parser_from_source_str(sess, name, source).parse_crate_mod()
 }
 
-pub fn parse_crate_attrs_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
-                                             -> PResult<'a, Vec<ast::Attribute>> {
+pub fn parse_crate_attrs_from_source_str(name: String, source: String, sess: &ParseSess)
+                                             -> PResult<Vec<ast::Attribute>> {
     new_parser_from_source_str(sess, name, source).parse_inner_attributes()
 }
 
-pub fn parse_expr_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
-                                      -> PResult<'a, P<ast::Expr>> {
+pub fn parse_expr_from_source_str(name: String, source: String, sess: &ParseSess)
+                                      -> PResult<P<ast::Expr>> {
     new_parser_from_source_str(sess, name, source).parse_expr()
 }
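The signature changes above remove an explicit `'a` wherever the compiler can infer it: with a single borrowed input (`&ParseSess`), the borrowed output (`PResult`, `Parser`) is tied to that input by the lifetime elision rules. A self-contained sketch of the same rule on illustrative types:

```rust
struct Session { name: String }
struct Parser<'a> { sess: &'a Session }

// Explicit lifetime, as before the patch...
fn new_parser_explicit<'a>(sess: &'a Session) -> Parser<'a> {
    Parser { sess }
}

// ...and the elided form: the output borrow is inferred from the single
// reference parameter, so the two signatures mean the same thing.
fn new_parser_elided(sess: &Session) -> Parser {
    Parser { sess }
}

fn main() {
    let s = Session { name: "demo".to_string() };
    assert_eq!(new_parser_explicit(&s).sess.name, new_parser_elided(&s).sess.name);
}
```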
 
@@ -126,29 +126,29 @@ pub fn parse_expr_from_source_str<'a>(name: String, source: String, sess: &'a Pa
 ///
 /// Returns `Ok(Some(item))` when successful, `Ok(None)` when no item was found, and`Err`
 /// when a syntax error occurred.
-pub fn parse_item_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
-                                      -> PResult<'a, Option<P<ast::Item>>> {
+pub fn parse_item_from_source_str(name: String, source: String, sess: &ParseSess)
+                                      -> PResult<Option<P<ast::Item>>> {
     new_parser_from_source_str(sess, name, source).parse_item()
 }
 
-pub fn parse_meta_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
-                                      -> PResult<'a, ast::MetaItem> {
+pub fn parse_meta_from_source_str(name: String, source: String, sess: &ParseSess)
+                                      -> PResult<ast::MetaItem> {
     new_parser_from_source_str(sess, name, source).parse_meta_item()
 }
 
-pub fn parse_stmt_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
-                                      -> PResult<'a, Option<ast::Stmt>> {
+pub fn parse_stmt_from_source_str(name: String, source: String, sess: &ParseSess)
+                                      -> PResult<Option<ast::Stmt>> {
     new_parser_from_source_str(sess, name, source).parse_stmt()
 }
 
-pub fn parse_stream_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
+pub fn parse_stream_from_source_str(name: String, source: String, sess: &ParseSess)
                                         -> TokenStream {
     filemap_to_stream(sess, sess.codemap().new_filemap(name, source))
 }
 
 // Create a new parser from a source string
-pub fn new_parser_from_source_str<'a>(sess: &'a ParseSess, name: String, source: String)
-                                      -> Parser<'a> {
+pub fn new_parser_from_source_str(sess: &ParseSess, name: String, source: String)
+                                      -> Parser {
     filemap_to_parser(sess, sess.codemap().new_filemap(name, source))
 }
 
@@ -173,7 +173,7 @@ pub fn new_sub_parser_from_file<'a>(sess: &'a ParseSess,
 }
 
 /// Given a filemap and config, return a parser
-pub fn filemap_to_parser<'a>(sess: &'a ParseSess, filemap: Rc<FileMap>, ) -> Parser<'a> {
+pub fn filemap_to_parser(sess: &ParseSess, filemap: Rc<FileMap>) -> Parser {
     let end_pos = filemap.end_pos;
     let mut parser = stream_to_parser(sess, filemap_to_stream(sess, filemap));
 
@@ -186,7 +186,7 @@ pub fn filemap_to_parser<'a>(sess: &'a ParseSess, filemap: Rc<FileMap>, ) -> Par
 
 // must preserve old name for now, because quote! from the *existing*
 // compiler expands into it
-pub fn new_parser_from_tts<'a>(sess: &'a ParseSess, tts: Vec<TokenTree>) -> Parser<'a> {
+pub fn new_parser_from_tts(sess: &ParseSess, tts: Vec<TokenTree>) -> Parser {
     stream_to_parser(sess, tts.into_iter().collect())
 }
 
@@ -216,8 +216,8 @@ pub fn filemap_to_stream(sess: &ParseSess, filemap: Rc<FileMap>) -> TokenStream
     panictry!(srdr.parse_all_token_trees())
 }
 
-/// Given stream and the ParseSess, produce a parser
-pub fn stream_to_parser<'a>(sess: &'a ParseSess, stream: TokenStream) -> Parser<'a> {
+/// Given stream and the `ParseSess`, produce a parser
+pub fn stream_to_parser(sess: &ParseSess, stream: TokenStream) -> Parser {
     Parser::new(sess, stream, None, false)
 }
 
@@ -251,7 +251,7 @@ pub fn char_lit(lit: &str) -> (char, isize) {
             (c, 4)
         }
         'u' => {
-            assert!(lit.as_bytes()[2] == b'{');
+            assert_eq!(lit.as_bytes()[2], b'{');
             let idx = lit.find('}').unwrap();
             let v = u32::from_str_radix(&lit[3..idx], 16).unwrap();
             let c = char::from_u32(v).unwrap();
@@ -287,51 +287,46 @@ fn eat<'a>(it: &mut iter::Peekable<str::CharIndices<'a>>) {
     }
 
     let mut chars = lit.char_indices().peekable();
-    loop {
-        match chars.next() {
-            Some((i, c)) => {
-                match c {
-                    '\\' => {
-                        let ch = chars.peek().unwrap_or_else(|| {
-                            panic!("{}", error(i))
-                        }).1;
-
-                        if ch == '\n' {
-                            eat(&mut chars);
-                        } else if ch == '\r' {
-                            chars.next();
-                            let ch = chars.peek().unwrap_or_else(|| {
-                                panic!("{}", error(i))
-                            }).1;
-
-                            if ch != '\n' {
-                                panic!("lexer accepted bare CR");
-                            }
-                            eat(&mut chars);
-                        } else {
-                            // otherwise, a normal escape
-                            let (c, n) = char_lit(&lit[i..]);
-                            for _ in 0..n - 1 { // we don't need to move past the first \
-                                chars.next();
-                            }
-                            res.push(c);
-                        }
-                    },
-                    '\r' => {
-                        let ch = chars.peek().unwrap_or_else(|| {
-                            panic!("{}", error(i))
-                        }).1;
+    while let Some((i, c)) = chars.next() {
+        match c {
+            '\\' => {
+                let ch = chars.peek().unwrap_or_else(|| {
+                    panic!("{}", error(i))
+                }).1;
+
+                if ch == '\n' {
+                    eat(&mut chars);
+                } else if ch == '\r' {
+                    chars.next();
+                    let ch = chars.peek().unwrap_or_else(|| {
+                        panic!("{}", error(i))
+                    }).1;
 
-                        if ch != '\n' {
-                            panic!("lexer accepted bare CR");
-                        }
+                    if ch != '\n' {
+                        panic!("lexer accepted bare CR");
+                    }
+                    eat(&mut chars);
+                } else {
+                    // otherwise, a normal escape
+                    let (c, n) = char_lit(&lit[i..]);
+                    for _ in 0..n - 1 { // we don't need to move past the first \
                         chars.next();
-                        res.push('\n');
                     }
-                    c => res.push(c),
+                    res.push(c);
                 }
             },
-            None => break
+            '\r' => {
+                let ch = chars.peek().unwrap_or_else(|| {
+                    panic!("{}", error(i))
+                }).1;
+
+                if ch != '\n' {
+                    panic!("lexer accepted bare CR");
+                }
+                chars.next();
+                res.push('\n');
+            }
+            c => res.push(c),
         }
     }
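The loop rewrite above (and the matching one in `raw_str_lit` just below) replaces `loop { match chars.next() { Some(..) => ..., None => break } }` with `while let Some(..) = chars.next()`, which is purely structural. A small sketch of the idiom on a peekable character iterator (the CR/LF handling is illustrative, not the full lexer logic):

```rust
fn normalize_newlines(s: &str) -> String {
    let mut out = String::with_capacity(s.len());
    let mut chars = s.chars().peekable();
    // while-let replaces the loop / match / break-on-None boilerplate.
    while let Some(c) = chars.next() {
        if c == '\r' && chars.peek() == Some(&'\n') {
            chars.next();   // consume the '\n'
            out.push('\n'); // collapse "\r\n" into "\n"
        } else {
            out.push(c);
        }
    }
    out
}

fn main() {
    assert_eq!(normalize_newlines("a\r\nb"), "a\nb");
}
```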
 
@@ -346,22 +341,16 @@ pub fn raw_str_lit(lit: &str) -> String {
     debug!("raw_str_lit: given {}", escape_default(lit));
     let mut res = String::with_capacity(lit.len());
 
-    // FIXME #8372: This could be a for-loop if it didn't borrow the iterator
     let mut chars = lit.chars().peekable();
-    loop {
-        match chars.next() {
-            Some(c) => {
-                if c == '\r' {
-                    if *chars.peek().unwrap() != '\n' {
-                        panic!("lexer accepted bare CR");
-                    }
-                    chars.next();
-                    res.push('\n');
-                } else {
-                    res.push(c);
-                }
-            },
-            None => break
+    while let Some(c) = chars.next() {
+        if c == '\r' {
+            if *chars.peek().unwrap() != '\n' {
+                panic!("lexer accepted bare CR");
+            }
+            chars.next();
+            res.push('\n');
+        } else {
+            res.push(c);
         }
     }
 
@@ -459,7 +448,7 @@ pub fn byte_lit(lit: &str) -> (u8, usize) {
     if lit.len() == 1 {
         (lit.as_bytes()[0], 1)
     } else {
-        assert!(lit.as_bytes()[0] == b'\\', err(0));
+        assert_eq!(lit.as_bytes()[0], b'\\', "{}", err(0));
         let b = match lit.as_bytes()[1] {
             b'"' => b'"',
             b'n' => b'\n',
@@ -480,7 +469,7 @@ pub fn byte_lit(lit: &str) -> (u8, usize) {
                 }
             }
         };
-        return (b, 2);
+        (b, 2)
     }
 }
 
@@ -491,7 +480,7 @@ pub fn byte_str_lit(lit: &str) -> Rc<Vec<u8>> {
     let error = |i| format!("lexer should have rejected {} at {}", lit, i);
 
     /// Eat everything up to a non-whitespace
-    fn eat<'a, I: Iterator<Item=(usize, u8)>>(it: &mut iter::Peekable<I>) {
+    fn eat<I: Iterator<Item=(usize, u8)>>(it: &mut iter::Peekable<I>) {
         loop {
             match it.peek().map(|x| x.1) {
                 Some(b' ') | Some(b'\n') | Some(b'\r') | Some(b'\t') => {
@@ -578,7 +567,7 @@ pub fn integer_lit(s: &str, suffix: Option<Symbol>, diag: Option<(Span, &Handler
             if let Some(err) = err {
                 err!(diag, |span, diag| diag.span_err(span, err));
             }
-            return filtered_float_lit(Symbol::intern(&s), Some(suf), diag)
+            return filtered_float_lit(Symbol::intern(s), Some(suf), diag)
         }
     }
 
index d5baec675e44f2116ee1707c94315c7ba11ba2c7..078e86aa2941f36a8c28332986dcf3772177c15a 100644 (file)
@@ -59,7 +59,7 @@ fn report(&mut self,
 
         if !self.obsolete_set.contains(&kind) &&
             (error || self.sess.span_diagnostic.can_emit_warnings) {
-            err.note(&format!("{}", desc));
+            err.note(desc);
             self.obsolete_set.insert(kind);
         }
         err.emit();
index ca1351e3b4158c89793b06bd622f4e54f22aad99..4741f896d3cc0c8c6ce38501dd2e1cb1a7dcfe0e 100644 (file)
@@ -248,7 +248,7 @@ fn next(&mut self) -> TokenAndSpan {
     fn next_desugared(&mut self) -> TokenAndSpan {
         let (sp, name) = match self.next() {
             TokenAndSpan { sp, tok: token::DocComment(name) } => (sp, name),
-            tok @ _ => return tok,
+            tok => return tok,
         };
 
         let stripped = strip_doc_comment_decoration(&name.as_str());
@@ -354,7 +354,7 @@ pub enum Error {
 }
 
 impl Error {
-    pub fn span_err<'a>(self, sp: Span, handler: &'a errors::Handler) -> DiagnosticBuilder<'a> {
+    pub fn span_err(self, sp: Span, handler: &errors::Handler) -> DiagnosticBuilder {
         match self {
             Error::FileNotFoundForModule { ref mod_name,
                                            ref default_path,
@@ -478,9 +478,10 @@ pub fn new(sess: &'a ParseSess,
     }
 
     fn next_tok(&mut self) -> TokenAndSpan {
-        let mut next = match self.desugar_doc_comments {
-            true => self.token_cursor.next_desugared(),
-            false => self.token_cursor.next(),
+        let mut next = if self.desugar_doc_comments {
+            self.token_cursor.next_desugared()
+        } else {
+            self.token_cursor.next()
         };
         if next.sp == syntax_pos::DUMMY_SP {
             next.sp = self.prev_span;
@@ -551,7 +552,7 @@ fn tokens_to_string(tokens: &[TokenType]) -> String {
             // This might be a sign we need a connect method on Iterator.
             let b = i.next()
                      .map_or("".to_string(), |t| t.to_string());
-            i.enumerate().fold(b, |mut b, (i, ref a)| {
+            i.enumerate().fold(b, |mut b, (i, a)| {
                 if tokens.len() > 2 && i == tokens.len() - 2 {
                     b.push_str(", or ");
                 } else if tokens.len() == 2 && i == tokens.len() - 2 {
@@ -985,18 +986,15 @@ fn parse_seq_to_before_tokens<T, F, Fe>(&mut self,
                 token::CloseDelim(..) | token::Eof => break,
                 _ => {}
             };
-            match sep.sep {
-                Some(ref t) => {
-                    if first {
-                        first = false;
-                    } else {
-                        if let Err(e) = self.expect(t) {
-                            fe(e);
-                            break;
-                        }
+            if let Some(ref t) = sep.sep {
+                if first {
+                    first = false;
+                } else {
+                    if let Err(e) = self.expect(t) {
+                        fe(e);
+                        break;
                     }
                 }
-                _ => ()
             }
             if sep.trailing_sep_allowed && kets.iter().any(|k| self.check(k)) {
                 break;
@@ -1493,7 +1491,7 @@ fn maybe_recover_from_bad_type_plus(&mut self, allow_plus: bool, ty: &Ty) -> PRe
         let sum_span = ty.span.to(self.prev_span);
 
         let mut err = struct_span_err!(self.sess.span_diagnostic, sum_span, E0178,
-            "expected a path on the left-hand side of `+`, not `{}`", pprust::ty_to_string(&ty));
+            "expected a path on the left-hand side of `+`, not `{}`", pprust::ty_to_string(ty));
 
         match ty.node {
             TyKind::Rptr(ref lifetime, ref mut_ty) => {
@@ -1547,7 +1545,7 @@ pub fn parse_ptr(&mut self) -> PResult<'a, MutTy> {
 
     pub fn is_named_argument(&mut self) -> bool {
         let offset = match self.token {
-            token::BinOp(token::And) => 1,
+            token::BinOp(token::And) |
             token::AndAnd => 1,
             _ if self.token.is_keyword(keywords::Mut) => 1,
             _ => 0
@@ -3154,10 +3152,11 @@ pub fn parse_arm(&mut self) -> PResult<'a, Arm> {
 
         let attrs = self.parse_outer_attributes()?;
         let pats = self.parse_pats()?;
-        let mut guard = None;
-        if self.eat_keyword(keywords::If) {
-            guard = Some(self.parse_expr()?);
-        }
+        let guard = if self.eat_keyword(keywords::If) {
+            Some(self.parse_expr()?)
+        } else {
+            None
+        };
         self.expect(&token::FatArrow)?;
         let expr = self.parse_expr_res(RESTRICTION_STMT_EXPR, None)?;
 
@@ -3600,10 +3599,11 @@ fn parse_local(&mut self, attrs: ThinVec<Attribute>) -> PResult<'a, P<Local>> {
         let lo = self.span;
         let pat = self.parse_pat()?;
 
-        let mut ty = None;
-        if self.eat(&token::Colon) {
-            ty = Some(self.parse_ty()?);
-        }
+        let ty = if self.eat(&token::Colon) {
+            Some(self.parse_ty()?)
+        } else {
+            None
+        };
         let init = self.parse_initializer()?;
         Ok(P(ast::Local {
             ty: ty,
@@ -3929,7 +3929,7 @@ fn parse_stmt_without_recovery(&mut self,
                 },
                 None => {
                     let unused_attrs = |attrs: &[_], s: &mut Self| {
-                        if attrs.len() > 0 {
+                        if !attrs.is_empty() {
                             if s.prev_token_kind == PrevTokenKind::DocComment {
                                 s.span_fatal_err(s.prev_span, Error::UselessDocComment).emit();
                             } else {
@@ -4815,7 +4815,7 @@ fn parse_impl_method(&mut self, vis: &Visibility, at_end: &mut bool)
                 self.expect(&token::Not)?;
             }
 
-            self.complain_if_pub_macro(&vis, prev_span);
+            self.complain_if_pub_macro(vis, prev_span);
 
             // eat a matched-delimiter token tree:
             *at_end = true;
@@ -4917,13 +4917,10 @@ fn parse_item_impl(&mut self,
                 }
             }
         } else {
-            match polarity {
-                ast::ImplPolarity::Negative => {
-                    // This is a negated type implementation
-                    // `impl !MyType {}`, which is not allowed.
-                    self.span_err(neg_span, "inherent implementation can't be negated");
-                },
-                _ => {}
+            if polarity == ast::ImplPolarity::Negative {
+                // This is a negated type implementation
+                // `impl !MyType {}`, which is not allowed.
+                self.span_err(neg_span, "inherent implementation can't be negated");
             }
             None
         };
@@ -5185,7 +5182,7 @@ pub fn parse_visibility(&mut self, can_take_tuple: bool) -> PResult<'a, Visibili
                 let path_span = self.prev_span;
                 let help_msg = format!("make this visible only to module `{}` with `in`:", path);
                 self.expect(&token::CloseDelim(token::Paren))?;  // `)`
-                let mut err = self.span_fatal_help(path_span, &msg, &suggestion);
+                let mut err = self.span_fatal_help(path_span, msg, suggestion);
                 err.span_suggestion(path_span, &help_msg, format!("in {}", path));
                 err.emit();  // emit diagnostic, but continue with public visibility
             }
index 25cabef70c15b5765b89a916db34a457a62a6e95..77db604c56e118c0596364536b3b1ed3674a2f24 100644 (file)
@@ -53,6 +53,10 @@ impl DelimToken {
     pub fn len(self) -> usize {
         if self == NoDelim { 0 } else { 1 }
     }
+
+    pub fn is_empty(self) -> bool {
+        self == NoDelim
+    }
 }
 
 #[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug, Copy)]
@@ -198,17 +202,17 @@ pub fn is_like_gt(&self) -> bool {
     pub fn can_begin_expr(&self) -> bool {
         match *self {
             Ident(ident)                => ident_can_begin_expr(ident), // value name or keyword
-            OpenDelim(..)               => true, // tuple, array or block
-            Literal(..)                 => true, // literal
-            Not                         => true, // operator not
-            BinOp(Minus)                => true, // unary minus
-            BinOp(Star)                 => true, // dereference
-            BinOp(Or) | OrOr            => true, // closure
-            BinOp(And)                  => true, // reference
-            AndAnd                      => true, // double reference
-            DotDot | DotDotDot          => true, // range notation
-            Lt | BinOp(Shl)             => true, // associated path
-            ModSep                      => true, // global path
+            OpenDelim(..)               | // tuple, array or block
+            Literal(..)                 | // literal
+            Not                         | // operator not
+            BinOp(Minus)                | // unary minus
+            BinOp(Star)                 | // dereference
+            BinOp(Or) | OrOr            | // closure
+            BinOp(And)                  | // reference
+            AndAnd                      | // double reference
+            DotDot | DotDotDot          | // range notation
+            Lt | BinOp(Shl)             | // associated path
+            ModSep                      | // global path
             Pound                       => true, // expression attributes
             Interpolated(ref nt) => match **nt {
                 NtIdent(..) | NtExpr(..) | NtBlock(..) | NtPath(..) => true,
@@ -222,16 +226,16 @@ pub fn can_begin_expr(&self) -> bool {
     pub fn can_begin_type(&self) -> bool {
         match *self {
             Ident(ident)                => ident_can_begin_type(ident), // type name or keyword
-            OpenDelim(Paren)            => true, // tuple
-            OpenDelim(Bracket)          => true, // array
-            Underscore                  => true, // placeholder
-            Not                         => true, // never
-            BinOp(Star)                 => true, // raw pointer
-            BinOp(And)                  => true, // reference
-            AndAnd                      => true, // double reference
-            Question                    => true, // maybe bound in trait object
-            Lifetime(..)                => true, // lifetime bound in trait object
-            Lt | BinOp(Shl)             => true, // associated path
+            OpenDelim(Paren)            | // tuple
+            OpenDelim(Bracket)          | // array
+            Underscore                  | // placeholder
+            Not                         | // never
+            BinOp(Star)                 | // raw pointer
+            BinOp(And)                  | // reference
+            AndAnd                      | // double reference
+            Question                    | // maybe bound in trait object
+            Lifetime(..)                | // lifetime bound in trait object
+            Lt | BinOp(Shl)             | // associated path
             ModSep                      => true, // global path
             Interpolated(ref nt) => match **nt {
                 NtIdent(..) | NtTy(..) | NtPath(..) => true,
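
The `can_begin_expr` and `can_begin_type` hunks collapse a column of `=> true` arms into one `|`-joined arm, keeping the per-variant comments while dropping the duplicated bodies. A reduced sketch of the same style on a toy token enum (the variants here are made up):

```rust
#[allow(dead_code)]
enum Tok {
    Ident(String),
    OpenParen,
    Literal(u64),
    Not,
    Minus,
    Comma,
}

fn can_begin_expr(t: &Tok) -> bool {
    match *t {
        Tok::Ident(_)   => true,          // value name
        Tok::OpenParen  | // grouping
        Tok::Literal(_) | // literal
        Tok::Not        | // operator not
        Tok::Minus      => true,          // unary minus
        _ => false,
    }
}

fn main() {
    assert!(can_begin_expr(&Tok::Minus));
    assert!(!can_begin_expr(&Tok::Comma));
}
```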
index 1d67c2a2c2b74023c8f1fa1c7c5e4dc4ca8d9e6b..e893c859247c6110790921f528c04c99f9403fae 100644 (file)
 //! between using 'left' and 'right' terms to denote the wrapped-to-ring-buffer
 //! and point-in-infinite-stream senses freely.
 //!
-//! There is a parallel ring buffer, 'size', that holds the calculated size of
+//! There is a parallel ring buffer, `size`, that holds the calculated size of
 //! each token. Why calculated? Because for Begin/End pairs, the "size"
 //! includes everything between the pair. That is, the "size" of Begin is
 //! actually the sum of the sizes of everything between Begin and the paired
-//! End that follows. Since that is arbitrarily far in the future, 'size' is
+//! End that follows. Since that is arbitrarily far in the future, `size` is
 //! being rewritten regularly while the printer runs; in fact most of the
-//! machinery is here to work out 'size' entries on the fly (and give up when
+//! machinery is here to work out `size` entries on the fly (and give up when
 //! they're so obviously over-long that "infinity" is a good enough
 //! approximation for purposes of line breaking).
 //!
 //! The "input side" of the printer is managed as an abstract process called
-//! SCAN, which uses 'scan_stack', to manage calculating 'size'. SCAN is, in
+//! SCAN, which uses `scan_stack`, to manage calculating `size`. SCAN is, in
 //! other words, the process of calculating 'size' entries.
 //!
 //! The "output side" of the printer is managed by an abstract process called
-//! PRINT, which uses 'print_stack', 'margin' and 'space' to figure out what to
+//! PRINT, which uses `print_stack`, `margin` and `space` to figure out what to
 //! do with each token/size pair it consumes as it goes. It's trying to consume
 //! the entire buffered window, but can't output anything until the size is >=
 //! 0 (sizes are set to negative while they're pending calculation).
@@ -409,7 +409,7 @@ pub fn scan_pop_bottom(&mut self) -> usize {
     pub fn advance_right(&mut self) {
         self.right += 1;
         self.right %= self.buf_len;
-        assert!(self.right != self.left);
+        assert_ne!(self.right, self.left);
     }
     pub fn advance_left(&mut self) -> io::Result<()> {
         debug!("advance_left Vec<{},{}>, sizeof({})={}", self.left, self.right,
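
The `advance_right` hunk swaps `assert!` for `assert_ne!` on the ring-buffer indices the module comment above describes: the write index wraps modulo the buffer length and must not collide with the read index, and `assert_ne!` reports both indices when that invariant breaks. A tiny standalone sketch of the wrap-around check, keeping only the two indices and the buffer length:

```rust
struct Ring {
    left: usize,
    right: usize,
    buf_len: usize,
}

impl Ring {
    fn advance_right(&mut self) {
        self.right += 1;
        self.right %= self.buf_len;
        // The write index may never catch up with the read index; on failure
        // `assert_ne!` prints both indices, which a bare `assert!` would not.
        assert_ne!(self.right, self.left);
    }
}

fn main() {
    let mut r = Ring { left: 3, right: 0, buf_len: 4 };
    r.advance_right(); // right becomes 1
    r.advance_right(); // right becomes 2
    assert_eq!(r.right, 2);
}
```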
index 0c7e8fda83766cbdbc8f4f0df87c1e42b5f9c523..83c289ff80b9251bd2df854ebed1207b7f5a09f0 100644 (file)
@@ -233,7 +233,7 @@ pub fn token_to_string(tok: &Token) -> String {
         token::CloseDelim(token::Bracket) => "]".to_string(),
         token::OpenDelim(token::Brace) => "{".to_string(),
         token::CloseDelim(token::Brace) => "}".to_string(),
-        token::OpenDelim(token::NoDelim) => " ".to_string(),
+        token::OpenDelim(token::NoDelim) |
         token::CloseDelim(token::NoDelim) => " ".to_string(),
         token::Pound                => "#".to_string(),
         token::Dollar               => "$".to_string(),
@@ -244,7 +244,7 @@ pub fn token_to_string(tok: &Token) -> String {
             let mut out = match lit {
                 token::Byte(b)           => format!("b'{}'", b),
                 token::Char(c)           => format!("'{}'", c),
-                token::Float(c)          => c.to_string(),
+                token::Float(c)          |
                 token::Integer(c)        => c.to_string(),
                 token::Str_(s)           => format!("\"{}\"", s),
                 token::StrRaw(s, n)      => format!("r{delim}\"{string}\"{delim}",
@@ -277,23 +277,23 @@ pub fn token_to_string(tok: &Token) -> String {
         token::Shebang(s)           => format!("/* shebang: {}*/", s),
 
         token::Interpolated(ref nt) => match **nt {
-            token::NtExpr(ref e)        => expr_to_string(&e),
-            token::NtMeta(ref e)        => meta_item_to_string(&e),
-            token::NtTy(ref e)          => ty_to_string(&e),
-            token::NtPath(ref e)        => path_to_string(&e),
-            token::NtItem(ref e)        => item_to_string(&e),
-            token::NtBlock(ref e)       => block_to_string(&e),
-            token::NtStmt(ref e)        => stmt_to_string(&e),
-            token::NtPat(ref e)         => pat_to_string(&e),
+            token::NtExpr(ref e)        => expr_to_string(e),
+            token::NtMeta(ref e)        => meta_item_to_string(e),
+            token::NtTy(ref e)          => ty_to_string(e),
+            token::NtPath(ref e)        => path_to_string(e),
+            token::NtItem(ref e)        => item_to_string(e),
+            token::NtBlock(ref e)       => block_to_string(e),
+            token::NtStmt(ref e)        => stmt_to_string(e),
+            token::NtPat(ref e)         => pat_to_string(e),
             token::NtIdent(ref e)       => ident_to_string(e.node),
             token::NtTT(ref tree)       => tt_to_string(tree.clone()),
-            token::NtArm(ref e)         => arm_to_string(&e),
-            token::NtImplItem(ref e)    => impl_item_to_string(&e),
-            token::NtTraitItem(ref e)   => trait_item_to_string(&e),
-            token::NtGenerics(ref e)    => generics_to_string(&e),
-            token::NtWhereClause(ref e) => where_clause_to_string(&e),
-            token::NtArg(ref e)         => arg_to_string(&e),
-            token::NtVis(ref e)         => vis_to_string(&e),
+            token::NtArm(ref e)         => arm_to_string(e),
+            token::NtImplItem(ref e)    => impl_item_to_string(e),
+            token::NtTraitItem(ref e)   => trait_item_to_string(e),
+            token::NtGenerics(ref e)    => generics_to_string(e),
+            token::NtWhereClause(ref e) => where_clause_to_string(e),
+            token::NtArg(ref e)         => arg_to_string(e),
+            token::NtVis(ref e)         => vis_to_string(e),
         }
     }
 }
@@ -520,8 +520,7 @@ fn next_lit(&mut self, pos: BytePos) -> Option<comments::Literal> {
 
         let mut result = None;
 
-        if let &Some(ref lits) = self.literals()
-        {
+        if let Some(ref lits) = *self.literals() {
             while cur_lit < lits.len() {
                 let ltrl = (*lits)[cur_lit].clone();
                 if ltrl.pos > pos { break; }
@@ -618,11 +617,8 @@ fn next_comment(&mut self) -> Option<comments::Comment> {
 
     fn print_literal(&mut self, lit: &ast::Lit) -> io::Result<()> {
         self.maybe_print_comment(lit.span.lo)?;
-        match self.next_lit(lit.span.lo) {
-            Some(ref ltrl) => {
-                return word(self.writer(), &(*ltrl).lit);
-            }
-            _ => ()
+        if let Some(ref ltrl) = self.next_lit(lit.span.lo) {
+            return word(self.writer(), &(*ltrl).lit);
         }
         match lit.node {
             ast::LitKind::Str(st, style) => self.print_string(&st.as_str(), style),
@@ -799,7 +795,7 @@ fn print_meta_item(&mut self, item: &ast::MetaItem) -> io::Result<()> {
                 self.popen()?;
                 self.commasep(Consistent,
                               &items[..],
-                              |s, i| s.print_meta_list_item(&i))?;
+                              |s, i| s.print_meta_list_item(i))?;
                 self.pclose()?;
             }
         }
@@ -982,14 +978,14 @@ pub fn commasep_cmnt<T, F, G>(&mut self,
 
     pub fn commasep_exprs(&mut self, b: Breaks,
                           exprs: &[P<ast::Expr>]) -> io::Result<()> {
-        self.commasep_cmnt(b, exprs, |s, e| s.print_expr(&e), |e| e.span)
+        self.commasep_cmnt(b, exprs, |s, e| s.print_expr(e), |e| e.span)
     }
 
     pub fn print_mod(&mut self, _mod: &ast::Mod,
                      attrs: &[ast::Attribute]) -> io::Result<()> {
         self.print_inner_attributes(attrs)?;
         for item in &_mod.items {
-            self.print_item(&item)?;
+            self.print_item(item)?;
         }
         Ok(())
     }
@@ -1018,7 +1014,7 @@ pub fn print_type(&mut self, ty: &ast::Ty) -> io::Result<()> {
         match ty.node {
             ast::TyKind::Slice(ref ty) => {
                 word(&mut self.s, "[")?;
-                self.print_type(&ty)?;
+                self.print_type(ty)?;
                 word(&mut self.s, "]")?;
             }
             ast::TyKind::Ptr(ref mt) => {
@@ -1040,7 +1036,7 @@ pub fn print_type(&mut self, ty: &ast::Ty) -> io::Result<()> {
             ast::TyKind::Tup(ref elts) => {
                 self.popen()?;
                 self.commasep(Inconsistent, &elts[..],
-                              |s, ty| s.print_type(&ty))?;
+                              |s, ty| s.print_type(ty))?;
                 if elts.len() == 1 {
                     word(&mut self.s, ",")?;
                 }
@@ -1048,7 +1044,7 @@ pub fn print_type(&mut self, ty: &ast::Ty) -> io::Result<()> {
             }
             ast::TyKind::Paren(ref typ) => {
                 self.popen()?;
-                self.print_type(&typ)?;
+                self.print_type(typ)?;
                 self.pclose()?;
             }
             ast::TyKind::BareFn(ref f) => {
@@ -1081,14 +1077,14 @@ pub fn print_type(&mut self, ty: &ast::Ty) -> io::Result<()> {
             }
             ast::TyKind::Array(ref ty, ref v) => {
                 word(&mut self.s, "[")?;
-                self.print_type(&ty)?;
+                self.print_type(ty)?;
                 word(&mut self.s, "; ")?;
-                self.print_expr(&v)?;
+                self.print_expr(v)?;
                 word(&mut self.s, "]")?;
             }
             ast::TyKind::Typeof(ref e) => {
                 word(&mut self.s, "typeof(")?;
-                self.print_expr(&e)?;
+                self.print_expr(e)?;
                 word(&mut self.s, ")")?;
             }
             ast::TyKind::Infer => {
@@ -1130,7 +1126,7 @@ pub fn print_foreign_item(&mut self,
                 }
                 self.print_ident(item.ident)?;
                 self.word_space(":")?;
-                self.print_type(&t)?;
+                self.print_type(t)?;
                 word(&mut self.s, ";")?;
                 self.end()?; // end the head-ibox
                 self.end() // end the outer cbox
@@ -1187,7 +1183,7 @@ pub fn print_item(&mut self, item: &ast::Item) -> io::Result<()> {
                 self.head(&visibility_qualified(&item.vis, "extern crate"))?;
                 if let Some(p) = *optional_path {
                     let val = p.as_str();
-                    if val.contains("-") {
+                    if val.contains('-') {
                         self.print_string(&val, ast::StrStyle::Cooked)?;
                     } else {
                         self.print_name(p)?;
@@ -1203,7 +1199,7 @@ pub fn print_item(&mut self, item: &ast::Item) -> io::Result<()> {
             }
             ast::ItemKind::Use(ref vp) => {
                 self.head(&visibility_qualified(&item.vis, "use"))?;
-                self.print_view_path(&vp)?;
+                self.print_view_path(vp)?;
                 word(&mut self.s, ";")?;
                 self.end()?; // end inner head-block
                 self.end()?; // end outer head-block
@@ -1215,12 +1211,12 @@ pub fn print_item(&mut self, item: &ast::Item) -> io::Result<()> {
                 }
                 self.print_ident(item.ident)?;
                 self.word_space(":")?;
-                self.print_type(&ty)?;
+                self.print_type(ty)?;
                 space(&mut self.s)?;
                 self.end()?; // end the head-ibox
 
                 self.word_space("=")?;
-                self.print_expr(&expr)?;
+                self.print_expr(expr)?;
                 word(&mut self.s, ";")?;
                 self.end()?; // end the outer cbox
             }
@@ -1228,12 +1224,12 @@ pub fn print_item(&mut self, item: &ast::Item) -> io::Result<()> {
                 self.head(&visibility_qualified(&item.vis, "const"))?;
                 self.print_ident(item.ident)?;
                 self.word_space(":")?;
-                self.print_type(&ty)?;
+                self.print_type(ty)?;
                 space(&mut self.s)?;
                 self.end()?; // end the head-ibox
 
                 self.word_space("=")?;
-                self.print_expr(&expr)?;
+                self.print_expr(expr)?;
                 word(&mut self.s, ";")?;
                 self.end()?; // end the outer cbox
             }
@@ -1249,7 +1245,7 @@ pub fn print_item(&mut self, item: &ast::Item) -> io::Result<()> {
                     &item.vis
                 )?;
                 word(&mut self.s, " ")?;
-                self.print_block_with_attrs(&body, &item.attrs)?;
+                self.print_block_with_attrs(body, &item.attrs)?;
             }
             ast::ItemKind::Mod(ref _mod) => {
                 self.head(&visibility_qualified(&item.vis, "mod"))?;
@@ -1282,7 +1278,7 @@ pub fn print_item(&mut self, item: &ast::Item) -> io::Result<()> {
                 self.print_where_clause(&params.where_clause)?;
                 space(&mut self.s)?;
                 self.word_space("=")?;
-                self.print_type(&ty)?;
+                self.print_type(ty)?;
                 word(&mut self.s, ";")?;
                 self.end()?; // end the outer ibox
             }
@@ -1297,11 +1293,11 @@ pub fn print_item(&mut self, item: &ast::Item) -> io::Result<()> {
             }
             ast::ItemKind::Struct(ref struct_def, ref generics) => {
                 self.head(&visibility_qualified(&item.vis, "struct"))?;
-                self.print_struct(&struct_def, generics, item.ident, item.span, true)?;
+                self.print_struct(struct_def, generics, item.ident, item.span, true)?;
             }
             ast::ItemKind::Union(ref struct_def, ref generics) => {
                 self.head(&visibility_qualified(&item.vis, "union"))?;
-                self.print_struct(&struct_def, generics, item.ident, item.span, true)?;
+                self.print_struct(struct_def, generics, item.ident, item.span, true)?;
             }
             ast::ItemKind::DefaultImpl(unsafety, ref trait_ref) => {
                 self.head("")?;
@@ -1333,11 +1329,8 @@ pub fn print_item(&mut self, item: &ast::Item) -> io::Result<()> {
                     space(&mut self.s)?;
                 }
 
-                match polarity {
-                    ast::ImplPolarity::Negative => {
-                        word(&mut self.s, "!")?;
-                    },
-                    _ => {}
+                if polarity == ast::ImplPolarity::Negative {
+                    word(&mut self.s, "!")?;
                 }
 
                 if let Some(ref t) = *opt_trait {
@@ -1346,7 +1339,7 @@ pub fn print_item(&mut self, item: &ast::Item) -> io::Result<()> {
                     self.word_space("for")?;
                 }
 
-                self.print_type(&ty)?;
+                self.print_type(ty)?;
                 self.print_where_clause(&generics.where_clause)?;
 
                 space(&mut self.s)?;
@@ -1543,7 +1536,7 @@ pub fn print_variant(&mut self, v: &ast::Variant) -> io::Result<()> {
             Some(ref d) => {
                 space(&mut self.s)?;
                 self.word_space("=")?;
-                self.print_expr(&d)
+                self.print_expr(d)
             }
             _ => Ok(())
         }
@@ -1571,7 +1564,7 @@ pub fn print_trait_item(&mut self, ti: &ast::TraitItem)
         self.print_outer_attributes(&ti.attrs)?;
         match ti.node {
             ast::TraitItemKind::Const(ref ty, ref default) => {
-                self.print_associated_const(ti.ident, &ty,
+                self.print_associated_const(ti.ident, ty,
                                             default.as_ref().map(|expr| &**expr),
                                             &ast::Visibility::Inherited)?;
             }
@@ -1614,7 +1607,7 @@ pub fn print_impl_item(&mut self, ii: &ast::ImplItem) -> io::Result<()> {
         self.print_defaultness(ii.defaultness)?;
         match ii.node {
             ast::ImplItemKind::Const(ref ty, ref expr) => {
-                self.print_associated_const(ii.ident, &ty, Some(&expr), &ii.vis)?;
+                self.print_associated_const(ii.ident, ty, Some(expr), &ii.vis)?;
             }
             ast::ImplItemKind::Method(ref sig, ref body) => {
                 self.head("")?;
@@ -1650,38 +1643,38 @@ pub fn print_stmt(&mut self, st: &ast::Stmt) -> io::Result<()> {
                 self.word_nbsp("let")?;
 
                 self.ibox(INDENT_UNIT)?;
-                self.print_local_decl(&loc)?;
+                self.print_local_decl(loc)?;
                 self.end()?;
                 if let Some(ref init) = loc.init {
                     self.nbsp()?;
                     self.word_space("=")?;
-                    self.print_expr(&init)?;
+                    self.print_expr(init)?;
                 }
                 word(&mut self.s, ";")?;
                 self.end()?;
             }
-            ast::StmtKind::Item(ref item) => self.print_item(&item)?,
+            ast::StmtKind::Item(ref item) => self.print_item(item)?,
             ast::StmtKind::Expr(ref expr) => {
                 self.space_if_not_bol()?;
-                self.print_expr_outer_attr_style(&expr, false)?;
+                self.print_expr_outer_attr_style(expr, false)?;
                 if parse::classify::expr_requires_semi_to_be_stmt(expr) {
                     word(&mut self.s, ";")?;
                 }
             }
             ast::StmtKind::Semi(ref expr) => {
                 self.space_if_not_bol()?;
-                self.print_expr_outer_attr_style(&expr, false)?;
+                self.print_expr_outer_attr_style(expr, false)?;
                 word(&mut self.s, ";")?;
             }
             ast::StmtKind::Mac(ref mac) => {
                 let (ref mac, style, ref attrs) = **mac;
                 self.space_if_not_bol()?;
-                self.print_outer_attributes(&attrs)?;
+                self.print_outer_attributes(attrs)?;
                 let delim = match style {
                     ast::MacStmtStyle::Braces => token::Brace,
                     _ => token::Paren
                 };
-                self.print_mac(&mac, delim)?;
+                self.print_mac(mac, delim)?;
                 if style == ast::MacStmtStyle::Semicolon {
                     word(&mut self.s, ";")?;
                 }
@@ -1735,7 +1728,7 @@ pub fn print_block_maybe_unclosed(&mut self,
                 ast::StmtKind::Expr(ref expr) if i == blk.stmts.len() - 1 => {
                     self.maybe_print_comment(st.span.lo)?;
                     self.space_if_not_bol()?;
-                    self.print_expr_outer_attr_style(&expr, false)?;
+                    self.print_expr_outer_attr_style(expr, false)?;
                     self.maybe_print_trailing_comment(expr.span, Some(blk.span.hi))?;
                 }
                 _ => self.print_stmt(st)?,
@@ -1755,9 +1748,9 @@ fn print_else(&mut self, els: Option<&ast::Expr>) -> io::Result<()> {
                         self.cbox(INDENT_UNIT - 1)?;
                         self.ibox(0)?;
                         word(&mut self.s, " else if ")?;
-                        self.print_expr(&i)?;
+                        self.print_expr(i)?;
                         space(&mut self.s)?;
-                        self.print_block(&then)?;
+                        self.print_block(then)?;
                         self.print_else(e.as_ref().map(|e| &**e))
                     }
                     // "another else-if-let"
@@ -1765,12 +1758,12 @@ fn print_else(&mut self, els: Option<&ast::Expr>) -> io::Result<()> {
                         self.cbox(INDENT_UNIT - 1)?;
                         self.ibox(0)?;
                         word(&mut self.s, " else if let ")?;
-                        self.print_pat(&pat)?;
+                        self.print_pat(pat)?;
                         space(&mut self.s)?;
                         self.word_space("=")?;
-                        self.print_expr(&expr)?;
+                        self.print_expr(expr)?;
                         space(&mut self.s)?;
-                        self.print_block(&then)?;
+                        self.print_block(then)?;
                         self.print_else(e.as_ref().map(|e| &**e))
                     }
                     // "final else"
@@ -1778,7 +1771,7 @@ fn print_else(&mut self, els: Option<&ast::Expr>) -> io::Result<()> {
                         self.cbox(INDENT_UNIT - 1)?;
                         self.ibox(0)?;
                         word(&mut self.s, " else ")?;
-                        self.print_block(&b)
+                        self.print_block(b)
                     }
                     // BLEAH, constraints would be great here
                     _ => {
@@ -1844,12 +1837,8 @@ pub fn check_expr_bin_needs_paren(&mut self, sub_expr: &ast::Expr,
                                       binop: ast::BinOp) -> bool {
         match sub_expr.node {
             ast::ExprKind::Binary(ref sub_op, _, _) => {
-                if AssocOp::from_ast_binop(sub_op.node).precedence() <
-                    AssocOp::from_ast_binop(binop.node).precedence() {
-                    true
-                } else {
-                    false
-                }
+                AssocOp::from_ast_binop(sub_op.node).precedence() <
+                    AssocOp::from_ast_binop(binop.node).precedence()
             }
             _ => true
         }
@@ -1929,7 +1918,7 @@ fn print_expr_struct(&mut self,
                     space(&mut self.s)?;
                 }
                 word(&mut self.s, "..")?;
-                self.print_expr(&expr)?;
+                self.print_expr(expr)?;
                 self.end()?;
             }
             _ => if !fields.is_empty() {
@@ -1969,7 +1958,7 @@ fn print_expr_method_call(&mut self,
         if !tys.is_empty() {
             word(&mut self.s, "::<")?;
             self.commasep(Inconsistent, tys,
-                          |s, ty| s.print_type(&ty))?;
+                          |s, ty| s.print_type(ty))?;
             word(&mut self.s, ">")?;
         }
         self.print_call_post(base_args)
@@ -2038,7 +2027,7 @@ fn print_expr_outer_attr_style(&mut self,
                 self.print_expr_vec(&exprs[..], attrs)?;
             }
             ast::ExprKind::Repeat(ref element, ref count) => {
-                self.print_expr_repeat(&element, &count, attrs)?;
+                self.print_expr_repeat(element, count, attrs)?;
             }
             ast::ExprKind::Struct(ref path, ref fields, ref wth) => {
                 self.print_expr_struct(path, &fields[..], wth, attrs)?;
@@ -2047,43 +2036,43 @@ fn print_expr_outer_attr_style(&mut self,
                 self.print_expr_tup(&exprs[..], attrs)?;
             }
             ast::ExprKind::Call(ref func, ref args) => {
-                self.print_expr_call(&func, &args[..])?;
+                self.print_expr_call(func, &args[..])?;
             }
             ast::ExprKind::MethodCall(ident, ref tys, ref args) => {
                 self.print_expr_method_call(ident, &tys[..], &args[..])?;
             }
             ast::ExprKind::Binary(op, ref lhs, ref rhs) => {
-                self.print_expr_binary(op, &lhs, &rhs)?;
+                self.print_expr_binary(op, lhs, rhs)?;
             }
             ast::ExprKind::Unary(op, ref expr) => {
-                self.print_expr_unary(op, &expr)?;
+                self.print_expr_unary(op, expr)?;
             }
             ast::ExprKind::AddrOf(m, ref expr) => {
-                self.print_expr_addr_of(m, &expr)?;
+                self.print_expr_addr_of(m, expr)?;
             }
             ast::ExprKind::Lit(ref lit) => {
-                self.print_literal(&lit)?;
+                self.print_literal(lit)?;
             }
             ast::ExprKind::Cast(ref expr, ref ty) => {
                 if let ast::ExprKind::Cast(..) = expr.node {
-                    self.print_expr(&expr)?;
+                    self.print_expr(expr)?;
                 } else {
-                    self.print_expr_maybe_paren(&expr)?;
+                    self.print_expr_maybe_paren(expr)?;
                 }
                 space(&mut self.s)?;
                 self.word_space("as")?;
-                self.print_type(&ty)?;
+                self.print_type(ty)?;
             }
             ast::ExprKind::Type(ref expr, ref ty) => {
-                self.print_expr(&expr)?;
+                self.print_expr(expr)?;
                 self.word_space(":")?;
-                self.print_type(&ty)?;
+                self.print_type(ty)?;
             }
             ast::ExprKind::If(ref test, ref blk, ref elseopt) => {
-                self.print_if(&test, &blk, elseopt.as_ref().map(|e| &**e))?;
+                self.print_if(test, blk, elseopt.as_ref().map(|e| &**e))?;
             }
             ast::ExprKind::IfLet(ref pat, ref expr, ref blk, ref elseopt) => {
-                self.print_if_let(&pat, &expr, &blk, elseopt.as_ref().map(|e| &**e))?;
+                self.print_if_let(pat, expr, blk, elseopt.as_ref().map(|e| &**e))?;
             }
             ast::ExprKind::While(ref test, ref blk, opt_ident) => {
                 if let Some(ident) = opt_ident {
@@ -2091,9 +2080,9 @@ fn print_expr_outer_attr_style(&mut self,
                     self.word_space(":")?;
                 }
                 self.head("while")?;
-                self.print_expr(&test)?;
+                self.print_expr(test)?;
                 space(&mut self.s)?;
-                self.print_block_with_attrs(&blk, attrs)?;
+                self.print_block_with_attrs(blk, attrs)?;
             }
             ast::ExprKind::WhileLet(ref pat, ref expr, ref blk, opt_ident) => {
                 if let Some(ident) = opt_ident {
@@ -2101,12 +2090,12 @@ fn print_expr_outer_attr_style(&mut self,
                     self.word_space(":")?;
                 }
                 self.head("while let")?;
-                self.print_pat(&pat)?;
+                self.print_pat(pat)?;
                 space(&mut self.s)?;
                 self.word_space("=")?;
-                self.print_expr(&expr)?;
+                self.print_expr(expr)?;
                 space(&mut self.s)?;
-                self.print_block_with_attrs(&blk, attrs)?;
+                self.print_block_with_attrs(blk, attrs)?;
             }
             ast::ExprKind::ForLoop(ref pat, ref iter, ref blk, opt_ident) => {
                 if let Some(ident) = opt_ident {
@@ -2114,12 +2103,12 @@ fn print_expr_outer_attr_style(&mut self,
                     self.word_space(":")?;
                 }
                 self.head("for")?;
-                self.print_pat(&pat)?;
+                self.print_pat(pat)?;
                 space(&mut self.s)?;
                 self.word_space("in")?;
-                self.print_expr(&iter)?;
+                self.print_expr(iter)?;
                 space(&mut self.s)?;
-                self.print_block_with_attrs(&blk, attrs)?;
+                self.print_block_with_attrs(blk, attrs)?;
             }
             ast::ExprKind::Loop(ref blk, opt_ident) => {
                 if let Some(ident) = opt_ident {
@@ -2128,13 +2117,13 @@ fn print_expr_outer_attr_style(&mut self,
                 }
                 self.head("loop")?;
                 space(&mut self.s)?;
-                self.print_block_with_attrs(&blk, attrs)?;
+                self.print_block_with_attrs(blk, attrs)?;
             }
             ast::ExprKind::Match(ref expr, ref arms) => {
                 self.cbox(INDENT_UNIT)?;
                 self.ibox(4)?;
                 self.word_nbsp("match")?;
-                self.print_expr(&expr)?;
+                self.print_expr(expr)?;
                 space(&mut self.s)?;
                 self.bopen()?;
                 self.print_inner_attributes_no_trailing_hardbreak(attrs)?;
@@ -2146,7 +2135,7 @@ fn print_expr_outer_attr_style(&mut self,
             ast::ExprKind::Closure(capture_clause, ref decl, ref body, _) => {
                 self.print_capture_clause(capture_clause)?;
 
-                self.print_fn_block_args(&decl)?;
+                self.print_fn_block_args(decl)?;
                 space(&mut self.s)?;
                 self.print_expr(body)?;
                 self.end()?; // need to close a box
@@ -2161,48 +2150,48 @@ fn print_expr_outer_attr_style(&mut self,
                 self.cbox(INDENT_UNIT)?;
                 // head-box, will be closed by print-block after {
                 self.ibox(0)?;
-                self.print_block_with_attrs(&blk, attrs)?;
+                self.print_block_with_attrs(blk, attrs)?;
             }
             ast::ExprKind::Assign(ref lhs, ref rhs) => {
-                self.print_expr(&lhs)?;
+                self.print_expr(lhs)?;
                 space(&mut self.s)?;
                 self.word_space("=")?;
-                self.print_expr(&rhs)?;
+                self.print_expr(rhs)?;
             }
             ast::ExprKind::AssignOp(op, ref lhs, ref rhs) => {
-                self.print_expr(&lhs)?;
+                self.print_expr(lhs)?;
                 space(&mut self.s)?;
                 word(&mut self.s, op.node.to_string())?;
                 self.word_space("=")?;
-                self.print_expr(&rhs)?;
+                self.print_expr(rhs)?;
             }
             ast::ExprKind::Field(ref expr, id) => {
-                self.print_expr(&expr)?;
+                self.print_expr(expr)?;
                 word(&mut self.s, ".")?;
                 self.print_ident(id.node)?;
             }
             ast::ExprKind::TupField(ref expr, id) => {
-                self.print_expr(&expr)?;
+                self.print_expr(expr)?;
                 word(&mut self.s, ".")?;
                 self.print_usize(id.node)?;
             }
             ast::ExprKind::Index(ref expr, ref index) => {
-                self.print_expr(&expr)?;
+                self.print_expr(expr)?;
                 word(&mut self.s, "[")?;
-                self.print_expr(&index)?;
+                self.print_expr(index)?;
                 word(&mut self.s, "]")?;
             }
             ast::ExprKind::Range(ref start, ref end, limits) => {
-                if let &Some(ref e) = start {
-                    self.print_expr(&e)?;
+                if let Some(ref e) = *start {
+                    self.print_expr(e)?;
                 }
                 if limits == ast::RangeLimits::HalfOpen {
                     word(&mut self.s, "..")?;
                 } else {
                     word(&mut self.s, "...")?;
                 }
-                if let &Some(ref e) = end {
-                    self.print_expr(&e)?;
+                if let Some(ref e) = *end {
+                    self.print_expr(e)?;
                 }
             }
             ast::ExprKind::Path(None, ref path) => {
@@ -2233,12 +2222,9 @@ fn print_expr_outer_attr_style(&mut self,
             }
             ast::ExprKind::Ret(ref result) => {
                 word(&mut self.s, "return")?;
-                match *result {
-                    Some(ref expr) => {
-                        word(&mut self.s, " ")?;
-                        self.print_expr(&expr)?;
-                    }
-                    _ => ()
+                if let Some(ref expr) = *result {
+                    word(&mut self.s, " ")?;
+                    self.print_expr(expr)?;
                 }
             }
             ast::ExprKind::InlineAsm(ref a) => {
@@ -2268,7 +2254,7 @@ fn print_expr_outer_attr_style(&mut self,
                 self.commasep(Inconsistent, &a.inputs, |s, &(co, ref o)| {
                     s.print_string(&co.as_str(), ast::StrStyle::Cooked)?;
                     s.popen()?;
-                    s.print_expr(&o)?;
+                    s.print_expr(o)?;
                     s.pclose()?;
                     Ok(())
                 })?;
@@ -2308,7 +2294,7 @@ fn print_expr_outer_attr_style(&mut self,
             ast::ExprKind::Paren(ref e) => {
                 self.popen()?;
                 self.print_inner_attributes_inline(attrs)?;
-                self.print_expr(&e)?;
+                self.print_expr(e)?;
                 self.pclose()?;
             },
             ast::ExprKind::Try(ref e) => {
@@ -2318,7 +2304,7 @@ fn print_expr_outer_attr_style(&mut self,
             ast::ExprKind::Catch(ref blk) => {
                 self.head("do catch")?;
                 space(&mut self.s)?;
-                self.print_block_with_attrs(&blk, attrs)?
+                self.print_block_with_attrs(blk, attrs)?
             }
         }
         self.ann.post(self, NodeExpr(expr))?;
@@ -2329,7 +2315,7 @@ pub fn print_local_decl(&mut self, loc: &ast::Local) -> io::Result<()> {
         self.print_pat(&loc.pat)?;
         if let Some(ref ty) = loc.ty {
             self.word_space(":")?;
-            self.print_type(&ty)?;
+            self.print_type(ty)?;
         }
         Ok(())
     }
@@ -2397,7 +2383,7 @@ fn print_qpath(&mut self,
             space(&mut self.s)?;
             self.word_space("as")?;
             let depth = path.segments.len() - qself.position;
-            self.print_path(&path, false, depth, false)?;
+            self.print_path(path, false, depth, false)?;
         }
         word(&mut self.s, ">")?;
         word(&mut self.s, "::")?;
@@ -2438,7 +2424,7 @@ fn print_path_parameters(&mut self,
                     self.commasep(
                         Inconsistent,
                         &data.types,
-                        |s, ty| s.print_type(&ty))?;
+                        |s, ty| s.print_type(ty))?;
                         comma = true;
                 }
 
@@ -2461,13 +2447,13 @@ fn print_path_parameters(&mut self,
                 self.commasep(
                     Inconsistent,
                     &data.inputs,
-                    |s, ty| s.print_type(&ty))?;
+                    |s, ty| s.print_type(ty))?;
                 word(&mut self.s, ")")?;
 
                 if let Some(ref ty) = data.output {
                     self.space_if_not_bol()?;
                     self.word_space("->")?;
-                    self.print_type(&ty)?;
+                    self.print_type(ty)?;
                 }
             }
         }
@@ -2496,24 +2482,24 @@ pub fn print_pat(&mut self, pat: &ast::Pat) -> io::Result<()> {
                 self.print_ident(path1.node)?;
                 if let Some(ref p) = *sub {
                     word(&mut self.s, "@")?;
-                    self.print_pat(&p)?;
+                    self.print_pat(p)?;
                 }
             }
             PatKind::TupleStruct(ref path, ref elts, ddpos) => {
                 self.print_path(path, true, 0, false)?;
                 self.popen()?;
                 if let Some(ddpos) = ddpos {
-                    self.commasep(Inconsistent, &elts[..ddpos], |s, p| s.print_pat(&p))?;
+                    self.commasep(Inconsistent, &elts[..ddpos], |s, p| s.print_pat(p))?;
                     if ddpos != 0 {
                         self.word_space(",")?;
                     }
                     word(&mut self.s, "..")?;
                     if ddpos != elts.len() {
                         word(&mut self.s, ",")?;
-                        self.commasep(Inconsistent, &elts[ddpos..], |s, p| s.print_pat(&p))?;
+                        self.commasep(Inconsistent, &elts[ddpos..], |s, p| s.print_pat(p))?;
                     }
                 } else {
-                    self.commasep(Inconsistent, &elts[..], |s, p| s.print_pat(&p))?;
+                    self.commasep(Inconsistent, &elts[..], |s, p| s.print_pat(p))?;
                 }
                 self.pclose()?;
             }
@@ -2549,17 +2535,17 @@ pub fn print_pat(&mut self, pat: &ast::Pat) -> io::Result<()> {
             PatKind::Tuple(ref elts, ddpos) => {
                 self.popen()?;
                 if let Some(ddpos) = ddpos {
-                    self.commasep(Inconsistent, &elts[..ddpos], |s, p| s.print_pat(&p))?;
+                    self.commasep(Inconsistent, &elts[..ddpos], |s, p| s.print_pat(p))?;
                     if ddpos != 0 {
                         self.word_space(",")?;
                     }
                     word(&mut self.s, "..")?;
                     if ddpos != elts.len() {
                         word(&mut self.s, ",")?;
-                        self.commasep(Inconsistent, &elts[ddpos..], |s, p| s.print_pat(&p))?;
+                        self.commasep(Inconsistent, &elts[ddpos..], |s, p| s.print_pat(p))?;
                     }
                 } else {
-                    self.commasep(Inconsistent, &elts[..], |s, p| s.print_pat(&p))?;
+                    self.commasep(Inconsistent, &elts[..], |s, p| s.print_pat(p))?;
                     if elts.len() == 1 {
                         word(&mut self.s, ",")?;
                     }
@@ -2568,41 +2554,41 @@ pub fn print_pat(&mut self, pat: &ast::Pat) -> io::Result<()> {
             }
             PatKind::Box(ref inner) => {
                 word(&mut self.s, "box ")?;
-                self.print_pat(&inner)?;
+                self.print_pat(inner)?;
             }
             PatKind::Ref(ref inner, mutbl) => {
                 word(&mut self.s, "&")?;
                 if mutbl == ast::Mutability::Mutable {
                     word(&mut self.s, "mut ")?;
                 }
-                self.print_pat(&inner)?;
+                self.print_pat(inner)?;
             }
             PatKind::Lit(ref e) => self.print_expr(&**e)?,
             PatKind::Range(ref begin, ref end, ref end_kind) => {
-                self.print_expr(&begin)?;
+                self.print_expr(begin)?;
                 space(&mut self.s)?;
                 match *end_kind {
                     RangeEnd::Included => word(&mut self.s, "...")?,
                     RangeEnd::Excluded => word(&mut self.s, "..")?,
                 }
-                self.print_expr(&end)?;
+                self.print_expr(end)?;
             }
             PatKind::Slice(ref before, ref slice, ref after) => {
                 word(&mut self.s, "[")?;
                 self.commasep(Inconsistent,
                                    &before[..],
-                                   |s, p| s.print_pat(&p))?;
+                                   |s, p| s.print_pat(p))?;
                 if let Some(ref p) = *slice {
                     if !before.is_empty() { self.word_space(",")?; }
                     if p.node != PatKind::Wild {
-                        self.print_pat(&p)?;
+                        self.print_pat(p)?;
                     }
                     word(&mut self.s, "..")?;
                     if !after.is_empty() { self.word_space(",")?; }
                 }
                 self.commasep(Inconsistent,
                                    &after[..],
-                                   |s, p| s.print_pat(&p))?;
+                                   |s, p| s.print_pat(p))?;
                 word(&mut self.s, "]")?;
             }
             PatKind::Mac(ref m) => self.print_mac(m, token::Paren)?,
@@ -2628,12 +2614,12 @@ fn print_arm(&mut self, arm: &ast::Arm) -> io::Result<()> {
                 space(&mut self.s)?;
                 self.word_space("|")?;
             }
-            self.print_pat(&p)?;
+            self.print_pat(p)?;
         }
         space(&mut self.s)?;
         if let Some(ref e) = arm.guard {
             self.word_space("if")?;
-            self.print_expr(&e)?;
+            self.print_expr(e)?;
             space(&mut self.s)?;
         }
         self.word_space("=>")?;
@@ -2641,7 +2627,7 @@ fn print_arm(&mut self, arm: &ast::Arm) -> io::Result<()> {
         match arm.body.node {
             ast::ExprKind::Block(ref blk) => {
                 // the block will close the pattern's ibox
-                self.print_block_unclosed_indent(&blk, INDENT_UNIT)?;
+                self.print_block_unclosed_indent(blk, INDENT_UNIT)?;
 
                 // If it is a user-provided unsafe block, print a comma after it
                 if let BlockCheckMode::Unsafe(ast::UserProvided) = blk.rules {
@@ -2673,7 +2659,7 @@ fn print_explicit_self(&mut self, explicit_self: &ast::ExplicitSelf) -> io::Resu
                 self.print_mutability(m)?;
                 word(&mut self.s, "self")?;
                 self.word_space(":")?;
-                self.print_type(&typ)
+                self.print_type(typ)
             }
         }
     }
@@ -2725,7 +2711,7 @@ pub fn print_fn_block_args(
         self.word_space("->")?;
         match decl.output {
             ast::FunctionRetTy::Ty(ref ty) => {
-                self.print_type(&ty)?;
+                self.print_type(ty)?;
                 self.maybe_print_comment(ty.span.lo)
             }
             ast::FunctionRetTy::Default(..) => unreachable!(),
@@ -2839,7 +2825,7 @@ pub fn print_ty_param(&mut self, param: &ast::TyParam) -> io::Result<()> {
             Some(ref default) => {
                 space(&mut self.s)?;
                 self.word_space("=")?;
-                self.print_type(&default)
+                self.print_type(default)
             }
             _ => Ok(())
         }
@@ -2865,7 +2851,7 @@ pub fn print_where_clause(&mut self, where_clause: &ast::WhereClause)
                                                                              ref bounds,
                                                                              ..}) => {
                     self.print_formal_lifetime_list(bound_lifetimes)?;
-                    self.print_type(&bounded_ty)?;
+                    self.print_type(bounded_ty)?;
                     self.print_bounds(":", bounds)?;
                 }
                 ast::WherePredicate::RegionPredicate(ast::WhereRegionPredicate{ref lifetime,
@@ -2977,7 +2963,7 @@ pub fn print_fn_output(&mut self, decl: &ast::FnDecl) -> io::Result<()> {
         match decl.output {
             ast::FunctionRetTy::Default(..) => unreachable!(),
             ast::FunctionRetTy::Ty(ref ty) =>
-                self.print_type(&ty)?
+                self.print_type(ty)?
         }
         self.end()?;
 
@@ -3044,14 +3030,9 @@ pub fn print_remaining_comments(&mut self) -> io::Result<()> {
         if self.next_comment().is_none() {
             hardbreak(&mut self.s)?;
         }
-        loop {
-            match self.next_comment() {
-                Some(ref cmnt) => {
-                    self.print_comment(cmnt)?;
-                    self.cur_cmnt_and_lit.cur_cmnt += 1;
-                }
-                _ => break
-            }
+        while let Some(ref cmnt) = self.next_comment() {
+            self.print_comment(cmnt)?;
+            self.cur_cmnt_and_lit.cur_cmnt += 1;
         }
         Ok(())
     }
index c7820a15fb3d23d9f8c0a6fb41c59e2e2af45012..8e257102e1c13367d281d01cce701e94bb0bacf5 100644 (file)
@@ -18,7 +18,7 @@
 use tokenstream::TokenStream;
 
 /// Craft a span that will be ignored by the stability lint's
-/// call to codemap's is_internal check.
+/// call to codemap's `is_internal` check.
 /// The expanded code uses the unstable `#[prelude_import]` attribute.
 fn ignored_span(sp: Span) -> Span {
     let mark = Mark::fresh();
@@ -49,7 +49,7 @@ pub fn maybe_inject_crates_ref(mut krate: ast::Crate, alt_std_name: Option<Strin
         None => return krate,
     };
 
-    let crate_name = Symbol::intern(&alt_std_name.unwrap_or(name.to_string()));
+    let crate_name = Symbol::intern(&alt_std_name.unwrap_or_else(|| name.to_string()));
 
     krate.module.items.insert(0, P(ast::Item {
         attrs: vec![attr::mk_attr_outer(DUMMY_SP,
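
`unwrap_or_else` is preferred in the hunk above because `unwrap_or` evaluates its argument eagerly, so the fallback `String` would be built even when `alt_std_name` is already `Some`. A small sketch of the difference, with a hypothetical `expensive_default` standing in for `name.to_string()`:

```rust
fn expensive_default() -> String {
    println!("building the fallback name");
    "std".to_string()
}

fn main() {
    let alt_name: Option<String> = Some("core".to_string());

    // Eager: the fallback is built even though the Option is already Some.
    let eager = alt_name.clone().unwrap_or(expensive_default());

    // Lazy: the closure only runs when the Option is None.
    let lazy = alt_name.clone().unwrap_or_else(|| expensive_default());

    assert_eq!(eager, lazy);
}
```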
index 91746a2edd9b2a62abc70b84d7647ce277b5174a..bb1a6ff65a596018a263bc7aa94e941e24da1b78 100644 (file)
@@ -106,9 +106,8 @@ fn fold_crate(&mut self, c: ast::Crate) -> ast::Crate {
         // Add a special __test module to the crate that will contain code
         // generated for the test harness
         let (mod_, reexport) = mk_test_module(&mut self.cx);
-        match reexport {
-            Some(re) => folded.module.items.push(re),
-            None => {}
+        if let Some(re) = reexport {
+            folded.module.items.push(re)
         }
         folded.module.items.push(mod_);
         folded
@@ -257,7 +256,7 @@ fn mk_reexport_mod(cx: &mut TestCtxt,
     let parent = if parent == ast::DUMMY_NODE_ID { ast::CRATE_NODE_ID } else { parent };
     cx.ext_cx.current_expansion.mark = cx.ext_cx.resolver.get_module_scope(parent);
     let it = cx.ext_cx.monotonic_expander().fold_item(P(ast::Item {
-        ident: sym.clone(),
+        ident: sym,
         attrs: Vec::new(),
         id: ast::DUMMY_NODE_ID,
         node: ast::ItemKind::Mod(reexport_mod),
@@ -308,7 +307,7 @@ fn generate_test_harness(sess: &ParseSess,
 }
 
 /// Craft a span that will be ignored by the stability lint's
-/// call to codemap's is_internal check.
+/// call to codemap's `is_internal` check.
 /// The expanded code calls some unstable functions in the test crate.
 fn ignored_span(cx: &TestCtxt, sp: Span) -> Span {
     Span { ctxt: cx.ctxt, ..sp }
@@ -354,7 +353,7 @@ fn has_test_signature(i: &ast::Item) -> HasTestSignature {
         }
     }
 
-    return has_test_attr && has_test_signature(i) == Yes;
+    has_test_attr && has_test_signature(i) == Yes
 }
 
 fn is_bench_fn(cx: &TestCtxt, i: &ast::Item) -> bool {
@@ -385,7 +384,7 @@ fn has_test_signature(i: &ast::Item) -> bool {
                       `fn(&mut Bencher) -> ()`");
     }
 
-    return has_bench_attr && has_test_signature(i);
+    has_bench_attr && has_test_signature(i)
 }
 
 fn is_ignored(i: &ast::Item) -> bool {
@@ -504,16 +503,14 @@ fn mk_main(cx: &mut TestCtxt) -> P<ast::Item> {
                            ast::Unsafety::Normal,
                            dummy_spanned(ast::Constness::NotConst),
                            ::abi::Abi::Rust, ast::Generics::default(), main_body);
-    let main = P(ast::Item {
+    P(ast::Item {
         ident: Ident::from_str("main"),
         attrs: vec![main_attr],
         id: ast::DUMMY_NODE_ID,
         node: main,
         vis: ast::Visibility::Public,
         span: sp
-    });
-
-    return main;
+    })
 }
 
 fn mk_test_module(cx: &mut TestCtxt) -> (P<ast::Item>, Option<P<ast::Item>>) {
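
The `mk_main` and `is_*_fn` hunks above drop trailing `return` statements and temporary bindings in favour of tail expressions. A minimal sketch of that style, using a hypothetical `is_even`:

fn is_even(n: u32) -> bool {
    // The final expression of the block is the return value; no `return`,
    // no trailing semicolon, no intermediate `let` binding needed.
    n % 2 == 0
}

fn main() {
    assert!(is_even(4));
    assert!(!is_even(7));
}
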
index 86bfdebe42b0082e206b6a836b66916ce4061846..9c1371a31fec7a9287f9f874c5d59defe042056e 100644 (file)
 
 //! # Token Streams
 //!
-//! TokenStreams represent syntactic objects before they are converted into ASTs.
+//! `TokenStream`s represent syntactic objects before they are converted into ASTs.
 //! A `TokenStream` is, roughly speaking, a sequence (eg stream) of `TokenTree`s,
 //! which are themselves a single `Token` or a `Delimited` subsequence of tokens.
 //!
 //! ## Ownership
-//! TokenStreams are persistent data structures constructed as ropes with reference
-//! counted-children. In general, this means that calling an operation on a TokenStream
-//! (such as `slice`) produces an entirely new TokenStream from the borrowed reference to
-//! the original. This essentially coerces TokenStreams into 'views' of their subparts,
-//! and a borrowed TokenStream is sufficient to build an owned TokenStream without taking
+//! `TokenStream`s are persistent data structures constructed as ropes with reference-
+//! counted children. In general, this means that calling an operation on a `TokenStream`
+//! (such as `slice`) produces an entirely new `TokenStream` from the borrowed reference to
+//! the original. This essentially coerces `TokenStream`s into 'views' of their subparts,
+//! and a borrowed `TokenStream` is sufficient to build an owned `TokenStream` without taking
 //! ownership of the original.
 
 use syntax_pos::{BytePos, Span, DUMMY_SP};
@@ -88,7 +88,7 @@ pub fn stream(&self) -> TokenStream {
 /// If the syntax extension is an MBE macro, it will attempt to match its
 /// LHS token tree against the provided token tree, and if it finds a
 /// match, will transcribe the RHS token tree, splicing in any captured
-/// macro_parser::matched_nonterminals into the `SubstNt`s it finds.
+/// `macro_parser::matched_nonterminals` into the `SubstNt`s it finds.
 ///
 /// The RHS of an MBE macro is the only place `SubstNt`s are substituted.
 /// Nothing special happens to misnamed or misplaced `SubstNt`s.
index a6fff2d707469ffd2d4d1607dfb658b62b4a1139..9307f3c58d4b0eb5d6942e9e8e6ee5ac23769688 100644 (file)
@@ -53,9 +53,10 @@ pub fn find_best_match_for_name<'a, T>(iter_names: T,
     iter_names
     .filter_map(|&name| {
         let dist = lev_distance(lookup, &name.as_str());
-        match dist <= max_dist {    // filter the unwanted cases
-            true => Some((name, dist)),
-            false => None,
+        if dist <= max_dist {    // filter the unwanted cases
+            Some((name, dist))
+        } else {
+            None
         }
     })
     .min_by_key(|&(_, val)| val)    // extract the tuple containing the minimum edit distance
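
The surrounding function scores candidate names by Levenshtein distance, keeps those within a threshold, and takes the minimum; the hunk only swaps a `match` on a boolean for `if`/`else`. A self-contained sketch of the same shape, with a crude stand-in `distance` metric rather than the real `lev_distance`:

fn distance(a: &str, b: &str) -> usize {
    // Crude placeholder metric: mismatched characters plus length difference.
    a.chars().zip(b.chars()).filter(|(x, y)| x != y).count()
        + (a.len() as isize - b.len() as isize).abs() as usize
}

fn find_best_match<'a>(lookup: &str, candidates: &[&'a str], max_dist: usize) -> Option<&'a str> {
    candidates
        .iter()
        .filter_map(|&name| {
            let dist = distance(lookup, name);
            // The `if`/`else` form replaces the old `match dist <= max_dist { .. }`.
            if dist <= max_dist { Some((name, dist)) } else { None }
        })
        .min_by_key(|&(_, dist)| dist)
        .map(|(name, _)| name)
}

fn main() {
    let names = ["foo", "bar", "baz"];
    assert_eq!(find_best_match("bax", &names, 2), Some("bar"));
}
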
index fe05e2958b3a851dd1e6828a99560cf07b3e082a..8cc37afa354ffbeed217349ae7d96cf01f1f0f28 100644 (file)
@@ -37,10 +37,10 @@ fn move_flat_map<F, I>(mut self, mut f: F) -> Self
                 // move the read_i'th item out of the vector and map it
                 // to an iterator
                 let e = ptr::read(self.get_unchecked(read_i));
-                let mut iter = f(e).into_iter();
+                let iter = f(e).into_iter();
                 read_i += 1;
 
-                while let Some(e) = iter.next() {
+                for e in iter {
                     if write_i < read_i {
                         ptr::write(self.get_unchecked_mut(write_i), e);
                         write_i += 1;
@@ -93,10 +93,10 @@ fn move_flat_map<F, I>(mut self, mut f: F) -> Self
                 // move the read_i'th item out of the vector and map it
                 // to an iterator
                 let e = ptr::read(self.get_unchecked(read_i));
-                let mut iter = f(e).into_iter();
+                let iter = f(e).into_iter();
                 read_i += 1;
 
-                while let Some(e) = iter.next() {
+                for e in iter {
                     if write_i < read_i {
                         ptr::write(self.get_unchecked_mut(write_i), e);
                         write_i += 1;
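
`for e in iter` consumes the iterator and calls `next()` internally, so the explicit `while let` and the `mut` binding become unnecessary. A small illustrative sketch (data invented for the example):

fn main() {
    let nested = vec![vec![1, 2], vec![3]];
    let mut flat = Vec::new();

    for inner in nested {
        // `for` takes the iterator by value and drives `next()` itself,
        // so no `let mut iter = ...` and no explicit `while let` are needed.
        let iter = inner.into_iter();
        for e in iter {
            flat.push(e);
        }
    }

    assert_eq!(flat, vec![1, 2, 3]);
}
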
index 9288d95009c1de0822b4f3378ce9dd796cc8509e..0fa0753b22c8298276987aa44eff0d5d6c39f5bd 100644 (file)
@@ -27,6 +27,7 @@
 use ast::*;
 use syntax_pos::Span;
 use codemap::Spanned;
+use tokenstream::ThinTokenStream;
 
 #[derive(Copy, Clone, PartialEq, Eq)]
 pub enum FnKind<'a> {
@@ -112,6 +113,9 @@ fn visit_mac(&mut self, _mac: &'ast Mac) {
         // definition in your trait impl:
         // visit::walk_mac(self, _mac)
     }
+    fn visit_mac_def(&mut self, _mac: &'ast ThinTokenStream, _id: NodeId) {
+        // Nothing to do
+    }
     fn visit_path(&mut self, path: &'ast Path, _id: NodeId) {
         walk_path(self, path)
     }
@@ -290,7 +294,7 @@ pub fn walk_item<'a, V: Visitor<'a>>(visitor: &mut V, item: &'a Item) {
             walk_list!(visitor, visit_trait_item, methods);
         }
         ItemKind::Mac(ref mac) => visitor.visit_mac(mac),
-        ItemKind::MacroDef(..) => {},
+        ItemKind::MacroDef(ref ts) => visitor.visit_mac_def(ts, item.id),
     }
     walk_list!(visitor, visit_attribute, &item.attrs);
 }
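
The new `visit_mac_def` has a default no-op body, so existing `Visitor` implementations keep compiling and only interested visitors override it. A self-contained sketch of that pattern with hypothetical `Node`/`MacroDef` stand-ins for the AST types:

struct MacroDef;

enum Node {
    MacroDef(MacroDef),
    Other,
}

trait Visitor {
    // Default body is a no-op, mirroring the new `visit_mac_def` above.
    fn visit_macro_def(&mut self, _def: &MacroDef) {}
}

fn walk(visitor: &mut impl Visitor, node: &Node) {
    match node {
        Node::MacroDef(def) => visitor.visit_macro_def(def),
        Node::Other => {}
    }
}

struct MacroCounter {
    count: usize,
}

impl Visitor for MacroCounter {
    fn visit_macro_def(&mut self, _def: &MacroDef) {
        self.count += 1;
    }
}

fn main() {
    let nodes = vec![Node::MacroDef(MacroDef), Node::Other];
    let mut counter = MacroCounter { count: 0 };
    for node in &nodes {
        walk(&mut counter, node);
    }
    assert_eq!(counter.count, 1);
}
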
@@ -345,9 +349,7 @@ pub fn walk_ty<'a, V: Visitor<'a>>(visitor: &mut V, typ: &'a Ty) {
             visitor.visit_ty(ty);
             visitor.visit_expr(expression)
         }
-        TyKind::TraitObject(ref bounds) => {
-            walk_list!(visitor, visit_ty_param_bound, bounds);
-        }
+        TyKind::TraitObject(ref bounds) |
         TyKind::ImplTrait(ref bounds) => {
             walk_list!(visitor, visit_ty_param_bound, bounds);
         }
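
Merging the `TraitObject` and `ImplTrait` arms with `|` works because both variants carry the same payload shape, so one arm can bind it once. A minimal sketch with a hypothetical enum:

enum Kind {
    TraitObject(Vec<String>),
    ImplTrait(Vec<String>),
    Other,
}

fn bound_count(kind: &Kind) -> usize {
    match kind {
        // Both variants carry the same payload, so a single `|` arm binds it once.
        Kind::TraitObject(bounds) | Kind::ImplTrait(bounds) => bounds.len(),
        Kind::Other => 0,
    }
}

fn main() {
    assert_eq!(bound_count(&Kind::TraitObject(vec!["Sized".to_string()])), 1);
    assert_eq!(bound_count(&Kind::ImplTrait(vec!["Send".to_string(), "Sync".to_string()])), 2);
    assert_eq!(bound_count(&Kind::Other), 0);
}
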
@@ -542,7 +544,7 @@ pub fn walk_fn<'a, V>(visitor: &mut V, kind: FnKind<'a>, declaration: &'a FnDecl
             walk_fn_decl(visitor, declaration);
             visitor.visit_block(body);
         }
-        FnKind::Method(_, ref sig, _, body) => {
+        FnKind::Method(_, sig, _, body) => {
             visitor.visit_generics(&sig.generics);
             walk_fn_decl(visitor, declaration);
             visitor.visit_block(body);
@@ -778,7 +780,7 @@ pub fn walk_expr<'a, V: Visitor<'a>>(visitor: &mut V, expression: &'a Expr) {
         }
         ExprKind::InlineAsm(ref ia) => {
             for &(_, ref input) in &ia.inputs {
-                visitor.visit_expr(&input)
+                visitor.visit_expr(input)
             }
             for output in &ia.outputs {
                 visitor.visit_expr(&output.expr)
index 0d615db3deb4796270876c8b15597b4bdc10284f..ef048ac8ca355f9ea0e43f3bc5c1cb9774de0fb4 100644 (file)
@@ -542,6 +542,7 @@ struct ConsoleTestState<T> {
     passed: usize,
     failed: usize,
     ignored: usize,
+    filtered_out: usize,
     measured: usize,
     metrics: MetricMap,
     failures: Vec<(TestDesc, Vec<u8>)>,
@@ -570,6 +571,7 @@ pub fn new(opts: &TestOpts, _: Option<T>) -> io::Result<ConsoleTestState<io::Std
             passed: 0,
             failed: 0,
             ignored: 0,
+            filtered_out: 0,
             measured: 0,
             metrics: MetricMap::new(),
             failures: Vec::new(),
@@ -775,11 +777,12 @@ pub fn write_run_finish(&mut self) -> io::Result<bool> {
         } else {
             self.write_pretty("FAILED", term::color::RED)?;
         }
-        let s = format!(". {} passed; {} failed; {} ignored; {} measured\n\n",
+        let s = format!(". {} passed; {} failed; {} ignored; {} measured; {} filtered out\n\n",
                         self.passed,
                         self.failed,
                         self.ignored,
-                        self.measured);
+                        self.measured,
+                        self.filtered_out);
         self.write_plain(&s)?;
         return Ok(success);
     }
@@ -875,6 +878,7 @@ pub fn run_tests_console(opts: &TestOpts, tests: Vec<TestDescAndFn>) -> io::Resu
     fn callback<T: Write>(event: &TestEvent, st: &mut ConsoleTestState<T>) -> io::Result<()> {
         match (*event).clone() {
             TeFiltered(ref filtered_tests) => st.write_run_start(filtered_tests.len()),
+            TeFilteredOut(filtered_out) => Ok(st.filtered_out = filtered_out),
             TeWait(ref test, padding) => st.write_test_start(test, padding),
             TeTimeout(ref test) => st.write_timeout(test),
             TeResult(test, result, stdout) => {
@@ -957,6 +961,7 @@ fn should_sort_failures_before_printing_them() {
         passed: 0,
         failed: 0,
         ignored: 0,
+        filtered_out: 0,
         measured: 0,
         max_name_len: 10,
         metrics: MetricMap::new(),
@@ -1017,6 +1022,7 @@ pub enum TestEvent {
     TeWait(TestDesc, NamePadding),
     TeResult(TestDesc, TestResult, Vec<u8>),
     TeTimeout(TestDesc),
+    TeFilteredOut(usize),
 }
 
 pub type MonitorMsg = (TestDesc, TestResult, Vec<u8>);
@@ -1028,11 +1034,16 @@ pub fn run_tests<F>(opts: &TestOpts, tests: Vec<TestDescAndFn>, mut callback: F)
     use std::collections::HashMap;
     use std::sync::mpsc::RecvTimeoutError;
 
+    let tests_len = tests.len();
+
     let mut filtered_tests = filter_tests(opts, tests);
     if !opts.bench_benchmarks {
         filtered_tests = convert_benchmarks_to_tests(filtered_tests);
     }
 
+    let filtered_out = tests_len - filtered_tests.len();
+    callback(TeFilteredOut(filtered_out))?;
+
     let filtered_descs = filtered_tests.iter()
                                        .map(|t| t.desc.clone())
                                        .collect();
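
The runner now records how many tests the filter removed: it takes the length before filtering, subtracts the survivors, and reports the difference through the new `TeFilteredOut` event. A minimal sketch of that accounting with a hypothetical `Test` type and filter:

struct Test {
    name: String,
}

fn filter_tests(tests: Vec<Test>, pattern: &str) -> Vec<Test> {
    tests.into_iter().filter(|t| t.name.contains(pattern)).collect()
}

fn main() {
    let tests = vec![
        Test { name: "parse_ok".to_string() },
        Test { name: "parse_err".to_string() },
        Test { name: "render".to_string() },
    ];

    // Record the total before filtering, then report the difference,
    // as the patched runner does via the new `TeFilteredOut` event.
    let tests_len = tests.len();
    let filtered = filter_tests(tests, "parse");
    let filtered_out = tests_len - filtered.len();

    println!("running {} tests; {} filtered out", filtered.len(), filtered_out);
}
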
diff --git a/src/rust-installer b/src/rust-installer
deleted file mode 160000 (submodule)
index 2e6417f..0000000
+++ /dev/null
@@ -1 +0,0 @@
-Subproject commit 2e6417f6af5218a29a8ee72ed17af085560b9b9c
index e0066dd43be89f13cb3d8ec3a8abf80d302ce5a6..ddd8631f02e62b4c0f4974539847f41b7afadb9a 100644 (file)
@@ -14,6 +14,7 @@
 // aux-build:bang_proc_macro.rs
 
 #![feature(proc_macro)]
+#![allow(unused_macros)]
 
 #[macro_use]
 extern crate derive_foo;
diff --git a/src/test/compile-fail/coherence-inherited-assoc-ty-cycle-err.rs b/src/test/compile-fail/coherence-inherited-assoc-ty-cycle-err.rs
new file mode 100644 (file)
index 0000000..5d7f339
--- /dev/null
@@ -0,0 +1,34 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Formerly this ICEd with the following message:
+// Tried to project an inherited associated type during coherence checking,
+// which is currently not supported.
+//
+// Now we expect to run into a more user-friendly cycle error instead.
+
+#![feature(specialization)]
+
+trait Trait<T> { type Assoc; }
+//~^ unsupported cyclic reference between types/traits detected [E0391]
+
+impl<T> Trait<T> for Vec<T> {
+    type Assoc = ();
+}
+
+impl Trait<u8> for Vec<u8> {}
+
+impl<T> Trait<T> for String {
+    type Assoc = ();
+}
+
+impl Trait<<Vec<u8> as Trait<u8>>::Assoc> for String {}
+
+fn main() {}
index 1aabe6b87dff58107870358f47adb266823c8f66..9af501b141955f97d777a210afcf40c1e0a4914e 100644 (file)
@@ -10,6 +10,8 @@
 
 // gate-test-allow_internal_unstable
 
+#![allow(unused_macros)]
+
 macro_rules! bar {
     () => {
         // more layers don't help:
index 8a2d8dddac0741fdaff21fdcb8e4e1343637ba86..61a362cb37fb2c0d8d674447ba9af2b348bcf586 100644 (file)
@@ -8,6 +8,8 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
+#![allow(unused_macros)]
+
 #[allow_internal_unstable] //~ ERROR allow_internal_unstable side-steps
 macro_rules! foo {
     () => {}
index a0ac5d4c7204104237b06da36a47513b8e31d482..d710f5647dd9f97591f57c0740993bc551b3e4c7 100644 (file)
@@ -8,6 +8,8 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
+#![allow(unused_macros)]
+
 macro_rules! invalid {
     _ => (); //~ ERROR invalid macro matcher
 }
index fefd432e22982c66c47b60f5d8ffdf56311c053b..f66c09291cc9e4b74539de7bde5193c9f7310225 100644 (file)
@@ -8,6 +8,8 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
+#![allow(unused_macros)]
+
 macro_rules! test { ($wrong:t_ty ..) => () }
                   //~^ ERROR: invalid fragment specifier `t_ty`
 
index 6994d2199d276d4f7b18ff31e491659c4be83ab7..15eef429eab974667e3ea77daaada669acb10ccc 100644 (file)
@@ -8,6 +8,8 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
+#![allow(unused_macros)]
+
 macro_rules! assign {
     (($($a:tt)*) = ($($b:tt))*) => { //~ ERROR expected `*` or `+`
         $($a)* = $($b)*
index 0168ae7d910179ecf8a059ba923e25a7d8d4a13e..8b49772494a6687e1f34a106ec6bb5ab4ed87b91 100644 (file)
@@ -9,6 +9,7 @@
 // except according to those terms.
 
 #![deny(missing_fragment_specifier)] //~ NOTE lint level defined here
+#![allow(unused_macros)]
 
 macro_rules! m { ($i) => {} }
 //~^ ERROR missing fragment specifier
diff --git a/src/test/compile-fail/issue-41776.rs b/src/test/compile-fail/issue-41776.rs
new file mode 100644 (file)
index 0000000..5f108e0
--- /dev/null
@@ -0,0 +1,13 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn main() {
+    include!(line!()); //~ ERROR argument must be a string literal
+}
index 1c543a5fdacbba3deac22b7a66372fdb67fc4059..267362f902d720faeed69655bb5cc382215bd4db 100644 (file)
@@ -8,6 +8,8 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
+#![allow(unused_macros)]
+
 macro_rules! foo {
     ( $()* ) => {};
     //~^ ERROR repetition matches empty token tree
index ada06b0b3f452cdb3c97859e3ba551d05d4d706b..06f2d86e5d9bae93be3b5e23dbeea9ba5d34aaa8 100644 (file)
@@ -8,6 +8,8 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
+#![allow(unused_macros)]
+
 mod macros_cant_escape_fns {
     fn f() {
         macro_rules! m { () => { 3 + 4 } }
index 001bc42274b4daea014bfc5a56f584e27f9dff19..6e80e9b574bcfae9c3932c8551a2cecc5e621ad9 100644 (file)
@@ -10,6 +10,8 @@
 //
 // Check the macro follow sets (see corresponding rpass test).
 
+#![allow(unused_macros)]
+
 // FOLLOW(pat) = {FatArrow, Comma, Eq, Or, Ident(if), Ident(in)}
 macro_rules! follow_pat {
     ($p:pat ()) => {};       //~ERROR  `$p:pat` is followed by `(`
index 0ee2221bbc14b9658c9f5010c94be25821b54f7b..21cc946ded60563a48b5fa7ab8f713a841c23d98 100644 (file)
@@ -11,6 +11,8 @@
 // Regression test for issue #25436: check that things which can be
 // followed by any token also permit X* to come afterwards.
 
+#![allow(unused_macros)]
+
 macro_rules! foo {
   ( $a:expr $($b:tt)* ) => { }; //~ ERROR not allowed for `expr` fragments
   ( $a:ty $($b:tt)* ) => { };   //~ ERROR not allowed for `ty` fragments
index fe758a4a6310fbfe925eeb75d0a1e9c234d0209c..e5fdba63b0f152859c0a1f82614d57515b10716f 100644 (file)
@@ -8,6 +8,8 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
+#![allow(unused_macros)]
+
 macro_rules! errors_everywhere {
     ($ty:ty <) => (); //~ ERROR `$ty:ty` is followed by `<`, which is not allowed for `ty`
     ($ty:ty < foo ,) => (); //~ ERROR `$ty:ty` is followed by `<`, which is not allowed for `ty`
index 8381dc34a6a15f695630b44de9be358fdbdfc825..f5e7305e4ea9e61ba37d0429cfe9a1f5e58d4ca5 100644 (file)
@@ -10,6 +10,8 @@
 
 // aux-build:two_macros.rs
 
+#![allow(unused_macros)]
+
 macro_rules! foo { () => {} }
 macro_rules! macro_one { () => {} }
 #[macro_use(macro_two)] extern crate two_macros;
diff --git a/src/test/compile-fail/method-help-unsatisfied-bound.rs b/src/test/compile-fail/method-help-unsatisfied-bound.rs
deleted file mode 100644 (file)
index 6416d54..0000000
+++ /dev/null
@@ -1,18 +0,0 @@
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-struct Foo;
-
-fn main() {
-    let a: Result<(), Foo> = Ok(());
-    a.unwrap();
-    //~^ ERROR no method named `unwrap` found for type `std::result::Result<(), Foo>`
-    //~| NOTE the following trait bounds were not satisfied: `Foo : std::fmt::Debug`
-}
index b868b79365d9f683755398adf8b5dc2d496d7292..28a69e6f9e29beb5f900c78dbac33655681e0a95 100644 (file)
@@ -8,6 +8,8 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
+#![allow(unused_macros)]
+
 // Issue #21370
 
 macro_rules! test {
index e9d09bb6ad9cd739a7a4a700e9063a5e0ef4165f..dda0d3fc9557dd79afd0b0e895691892a816af06 100644 (file)
@@ -8,6 +8,8 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
+#![allow(unused_macros)]
+
 macro_rules! test {
     ($e:expr +) => () //~ ERROR not allowed for `expr` fragments
 }
diff --git a/src/test/compile-fail/unused-macro.rs b/src/test/compile-fail/unused-macro.rs
new file mode 100644 (file)
index 0000000..5e401c0
--- /dev/null
@@ -0,0 +1,39 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![deny(unused_macros)]
+
+// Most simple case
+macro_rules! unused { //~ ERROR: unused macro definition
+    () => {};
+}
+
+// Test macros created by macros
+macro_rules! create_macro {
+    () => {
+        macro_rules! m { //~ ERROR: unused macro definition
+            () => {};
+        }
+    };
+}
+create_macro!();
+
+#[allow(unused_macros)]
+mod bar {
+    // Test that putting the #[deny] close to the macro's definition
+    // works.
+
+    #[deny(unused_macros)]
+    macro_rules! unused { //~ ERROR: unused macro definition
+        () => {};
+    }
+}
+
+fn main() {}
index d55cef434f8d3eeafd44d3e07bbe1245077df14b..02e1a585fa89d054d267c206ca636f4ff4f5c093 100644 (file)
@@ -8,4 +8,6 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
+#![allow(unused_macros)]
+
 macro_rules! macro_rules { () => {} } //~ ERROR user-defined macros may not be named `macro_rules`
index 1064c97b744a4e51a899957ed90ac8f6cf35b341..adc2b23441ef817355c6af0d2ff05a4cb878c388 100644 (file)
@@ -8,8 +8,6 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// compile-flags: -Z incremental-cc
-
 pub struct Point {
     pub x: f32,
     pub y: f32,
index a02b71a753cc3bdff1f9b05b4a1e71ba4f3c6676..d802c9a8352eb675af47f076a0036fa7d2dc9ac8 100644 (file)
@@ -8,8 +8,6 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// compile-flags: -Z incremental-cc
-
 #![crate_type="rlib"]
 
 #[cfg(rpass1)]
index 08eef2a73f68f21d2bdb9f72083689fb8f762b17..dcc1ced635fbf8b6a08a7c03410ef7eff95f2867 100644 (file)
@@ -8,8 +8,6 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// compile-flags: -Z incremental-cc
-
 pub struct Point {
     pub x: f32,
     pub y: f32,
index e69dc51119e920e8bbdd7455c0a5356f8d538c8f..8df1cf54da2b97c3e34abb97abcba9b802f2b239 100644 (file)
@@ -8,8 +8,6 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// compile-flags: -Z incremental-cc
-
 pub struct Point {
     pub x: f32,
     pub y: f32,
index 09db90d618b69ce9af6a0361c241047cc506a7f9..1483bf92c9708ec30bd567f7a37563c37f144ca2 100644 (file)
@@ -10,7 +10,6 @@
 
 // ignore-tidy-linelength
 
-// aux-build:extern_crate.rs
 //[rpass1] compile-flags: -g
 //[rpass2] compile-flags: -g
 //[rpass3] compile-flags: -g -Zremap-path-prefix-from={{src-base}} -Zremap-path-prefix-to=/the/src
index 8a8c658acccbf2d936fe1648d65273b1682ba2ba..be4764c7d9948288d25bc00493f4e5706a48dccd 100644 (file)
@@ -9,7 +9,7 @@
 // except according to those terms.
 
 // revisions:rpass1 rpass2 rpass3
-// compile-flags: -Z query-dep-graph -g
+// compile-flags: -Z query-dep-graph -g -Zincremental-cc
 // aux-build:extern_crate.rs
 
 
index 39547fb7359f56e1ba64e78cd466b260481b71bc..4d84e844dedbbc6ff84839eff5207d872072176d 100644 (file)
@@ -8,8 +8,6 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// compile-flags: -Z incremental-cc
-
 #![allow(warnings)]
 #![crate_name = "a"]
 #![crate_type = "rlib"]
index 3ecd9aff3f8cdc348d5f55bb415bdf7301b36f77..ff5fd634714497bf53a38f11bc1aa323844a5a8c 100644 (file)
@@ -8,7 +8,6 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// compile-flags: -Z incremental-cc
 // no-prefer-dynamic
 
 #![crate_type="rlib"]
index d14ebf78d8222b452975d9d7d985f665efac1027..2ddcaf157210dfef38201bad5d729743d5527db4 100644 (file)
@@ -8,8 +8,6 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// compile-flags: -Z incremental-cc
-
 #![crate_type="rlib"]
 
  #[cfg(rpass1)]
index 0393bcda991565f874e0cba700d01da027f2cca4..e1dba1317703d6c448b969d1327a3884b8cc113c 100644 (file)
@@ -8,8 +8,6 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// compile-flags: -Z incremental-cc
-
 #![crate_type="rlib"]
 
 #[cfg(rpass1)]
diff --git a/src/test/mir-opt/issue-41697.rs b/src/test/mir-opt/issue-41697.rs
new file mode 100644 (file)
index 0000000..47eeffe
--- /dev/null
@@ -0,0 +1,48 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Regression test for #41697. Using dump-mir was triggering
+// artificial cycles: during type-checking, we had to get the MIR for
+// the constant expressions in `[u8; 2]`, which in turn would trigger
+// an attempt to get the item-path, which in turn would request the
+// types of the impl, which would trigger a cycle. We suppressed this
+// cycle now by forcing mir-dump to avoid asking for types of an impl.
+
+#![feature(rustc_attrs)]
+
+use std::sync::Arc;
+
+trait Foo {
+    fn get(&self) -> [u8; 2];
+}
+
+impl Foo for [u8; 2] {
+    fn get(&self) -> [u8; 2] {
+        *self
+    }
+}
+
+struct Bar<T: ?Sized>(T);
+
+fn unsize_fat_ptr<'a>(x: &'a Bar<Foo + Send + 'a>) -> &'a Bar<Foo + 'a> {
+    x
+}
+
+fn unsize_nested_fat_ptr(x: Arc<Foo + Send>) -> Arc<Foo> {
+    x
+}
+
+fn main() {
+    let x: Box<Bar<Foo + Send>> = Box::new(Bar([1,2]));
+    assert_eq!(unsize_fat_ptr(&*x).0.get(), [1, 2]);
+
+    let x: Arc<Foo + Send> = Arc::new([3, 4]);
+    assert_eq!(unsize_nested_fat_ptr(x).get(), [3, 4]);
+}
index 0336fe277c51f8eb6f064d6e248d1f0d00e26653..e96588c6e5aea6a3978b4ea67d4f46ed4d903d3a 100644 (file)
@@ -15,6 +15,7 @@
 extern crate rustc_lint;
 extern crate rustc_metadata;
 extern crate rustc_errors;
+extern crate rustc_trans;
 extern crate syntax;
 
 use rustc::dep_graph::DepGraph;
@@ -58,8 +59,9 @@ fn basic_sess(sysroot: PathBuf) -> (Session, Rc<CStore>) {
 
     let descriptions = Registry::new(&rustc::DIAGNOSTICS);
     let dep_graph = DepGraph::new(opts.build_dep_graph());
-    let cstore = Rc::new(CStore::new(&dep_graph));
+    let cstore = Rc::new(CStore::new(&dep_graph, Box::new(rustc_trans::LlvmMetadataLoader)));
     let sess = build_session(opts, &dep_graph, None, descriptions, cstore.clone());
+    rustc_trans::init(&sess);
     rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess));
     (sess, cstore)
 }
index f77b2fca857a613e3759bb50b4fec40995186fc6..37aab2bbd059aa61579de5e914b41bdb71381abc 100644 (file)
@@ -14,6 +14,7 @@
 
 extern crate rustc;
 extern crate rustc_plugin;
+extern crate rustc_trans;
 
 #[link(name = "llvm-function-pass", kind = "static")]
 #[link(name = "llvm-module-pass", kind = "static")]
diff --git a/src/test/run-pass-fulldeps/issue-35829.rs b/src/test/run-pass-fulldeps/issue-35829.rs
new file mode 100644 (file)
index 0000000..0a4c15a
--- /dev/null
@@ -0,0 +1,55 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// ignore-stage1
+// ignore-cross-compile
+#![feature(quote, rustc_private)]
+
+extern crate syntax;
+
+use syntax::ext::base::{ExtCtxt, DummyResolver};
+use syntax::ext::expand::ExpansionConfig;
+use syntax::parse::ParseSess;
+use syntax::codemap::{FilePathMapping, dummy_spanned};
+use syntax::print::pprust::expr_to_string;
+use syntax::ast::{Expr, ExprKind, LitKind, StrStyle, RangeLimits};
+use syntax::symbol::Symbol;
+use syntax::ptr::P;
+
+use std::rc::Rc;
+
+fn main() {
+    let parse_sess = ParseSess::new(FilePathMapping::empty());
+    let exp_cfg = ExpansionConfig::default("issue_35829".to_owned());
+    let mut resolver = DummyResolver;
+    let cx = ExtCtxt::new(&parse_sess, exp_cfg, &mut resolver);
+
+    // check byte string
+    let byte_string = quote_expr!(&cx, b"one");
+    let byte_string_lit_kind = LitKind::ByteStr(Rc::new(b"one".to_vec()));
+    assert_eq!(byte_string.node, ExprKind::Lit(P(dummy_spanned(byte_string_lit_kind))));
+
+    // check raw byte string
+    let raw_byte_string = quote_expr!(&cx, br###"#"two"#"###);
+    let raw_byte_string_lit_kind = LitKind::ByteStr(Rc::new(b"#\"two\"#".to_vec()));
+    assert_eq!(raw_byte_string.node, ExprKind::Lit(P(dummy_spanned(raw_byte_string_lit_kind))));
+
+    // check dotdotdot
+    let closed_range = quote_expr!(&cx, 0 ... 1);
+    assert_eq!(closed_range.node, ExprKind::Range(
+        Some(quote_expr!(&cx, 0)),
+        Some(quote_expr!(&cx, 1)),
+        RangeLimits::Closed
+    ));
+
+    // test case from 35829
+    let expr_35829 = quote_expr!(&cx, std::io::stdout().write(b"one"));
+    assert_eq!(expr_to_string(&expr_35829), r#"std::io::stdout().write(b"one")"#);
+}
index 01b0ed802354c74e19f15f217738caa7bf50a685..e7d0a83017be004c4128d318ea1978515e18ed22 100644 (file)
 extern crate syntax;
 extern crate syntax_pos;
 
-use syntax::ast::Ident;
-use syntax::parse::token;
+use syntax::ast::{Ident, Name};
+use syntax::parse::token::{self, Token, Lit};
 use syntax::tokenstream::TokenTree;
 
 fn main() {
     let true_tok = token::Ident(Ident::from_str("true"));
     assert!(quote!(true).eq_unspanned(&true_tok.into()));
+
+    // issue #35829, extended check to proc_macro.
+    let triple_dot_tok = Token::DotDotDot;
+    assert!(quote!(...).eq_unspanned(&triple_dot_tok.into()));
+
+    let byte_str_tok = Token::Literal(Lit::ByteStr(Name::intern("one")), None);
+    assert!(quote!(b"one").eq_unspanned(&byte_str_tok.into()));
+
+    let byte_str_raw_tok = Token::Literal(Lit::ByteStrRaw(Name::intern("#\"two\"#"), 3), None);
+    assert!(quote!(br###"#"two"#"###).eq_unspanned(&byte_str_raw_tok.into()));
+
+    let str_raw_tok = Token::Literal(Lit::StrRaw(Name::intern("#\"three\"#"), 2), None);
+    assert!(quote!(r##"#"three"#"##).eq_unspanned(&str_raw_tok.into()));
 }
diff --git a/src/test/run-pass/issue-41697.rs b/src/test/run-pass/issue-41697.rs
deleted file mode 100644 (file)
index d59b6a1..0000000
+++ /dev/null
@@ -1,50 +0,0 @@
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// compile-flags:-Zdump-mir=NEVER_MATCHED
-
-// Regression test for #41697. Using dump-mir was triggering
-// artificial cycles: during type-checking, we had to get the MIR for
-// the constant expressions in `[u8; 2]`, which in turn would trigger
-// an attempt to get the item-path, which in turn would request the
-// types of the impl, which would trigger a cycle. We supressed this
-// cycle now by forcing mir-dump to avoid asking for types of an impl.
-
-#![feature(rustc_attrs)]
-
-use std::sync::Arc;
-
-trait Foo {
-    fn get(&self) -> [u8; 2];
-}
-
-impl Foo for [u8; 2] {
-    fn get(&self) -> [u8; 2] {
-        *self
-    }
-}
-
-struct Bar<T: ?Sized>(T);
-
-fn unsize_fat_ptr<'a>(x: &'a Bar<Foo + Send + 'a>) -> &'a Bar<Foo + 'a> {
-    x
-}
-
-fn unsize_nested_fat_ptr(x: Arc<Foo + Send>) -> Arc<Foo> {
-    x
-}
-
-fn main() {
-    let x: Box<Bar<Foo + Send>> = Box::new(Bar([1,2]));
-    assert_eq!(unsize_fat_ptr(&*x).0.get(), [1, 2]);
-
-    let x: Arc<Foo + Send> = Arc::new([3, 4]);
-    assert_eq!(unsize_nested_fat_ptr(x).get(), [3, 4]);
-}
diff --git a/src/test/run-pass/issue-41803.rs b/src/test/run-pass/issue-41803.rs
new file mode 100644 (file)
index 0000000..e18b420
--- /dev/null
@@ -0,0 +1,30 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+/// A compile-time map from identifiers to arbitrary (heterogeneous) expressions
+macro_rules! ident_map {
+    ( $name:ident = { $($key:ident => $e:expr,)* } ) => {
+        macro_rules! $name {
+            $(
+                ( $key ) => { $e };
+            )*
+            // Empty invocation expands to nothing. Needed when the map is empty.
+            () => {};
+        }
+    };
+}
+
+ident_map!(my_map = {
+    main => 0,
+});
+
+fn main() {
+    my_map!(main);
+}
diff --git a/src/test/run-pass/issue-41936-variance-coerce-unsized-cycle.rs b/src/test/run-pass/issue-41936-variance-coerce-unsized-cycle.rs
new file mode 100644 (file)
index 0000000..bfbead8
--- /dev/null
@@ -0,0 +1,38 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Regression test for #41936. The coerce-unsized trait check in
+// coherence was using subtyping, which triggered variance
+// computation, which failed because it required type info for fields
+// that had not (yet) been computed.
+
+#![feature(unsize)]
+#![feature(coerce_unsized)]
+
+use std::{marker,ops};
+
+// Change the array to a non-array, and error disappears
+// Adding a new field to the end keeps the error
+struct LogDataBuf([u8;8]);
+
+struct Aref<T: ?Sized>
+{
+    // Inner structure triggers the error, removing the inner removes the message.
+    ptr: Box<ArefInner<T>>,
+}
+impl<T: ?Sized + marker::Unsize<U>, U: ?Sized> ops::CoerceUnsized<Aref<U>> for Aref<T> {}
+
+struct ArefInner<T: ?Sized>
+{
+    // Even with this field commented out, the error is raised.
+    data: T,
+}
+
+fn main(){}
diff --git a/src/test/run-pass/specialization/assoc-ty-graph-cycle.rs b/src/test/run-pass/specialization/assoc-ty-graph-cycle.rs
new file mode 100644 (file)
index 0000000..a65dcf3
--- /dev/null
@@ -0,0 +1,33 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Make sure we don't crash with a cycle error during coherence.
+
+#![feature(specialization)]
+
+trait Trait<T> {
+    type Assoc;
+}
+
+impl<T> Trait<T> for Vec<T> {
+    default type Assoc = ();
+}
+
+impl Trait<u8> for Vec<u8> {
+    type Assoc = u8;
+}
+
+impl<T> Trait<T> for String {
+    type Assoc = ();
+}
+
+impl Trait<<Vec<u8> as Trait<u8>>::Assoc> for String {}
+
+fn main() {}
index 0e78746704fb150a838c7a885902e729a947d7f1..5c64b4118c3ab1a748e2175b3f74c82baa123efc 100644 (file)
@@ -18,9 +18,9 @@ impl Foo {
     pub fn rust0() {}
     // @has - '//code' 'fn rust1()'
     pub extern "Rust" fn rust1() {}
-    // @has - '//code' 'extern fn c0()'
+    // @has - '//code' 'extern "C" fn c0()'
     pub extern fn c0() {}
-    // @has - '//code' 'extern fn c1()'
+    // @has - '//code' 'extern "C" fn c1()'
     pub extern "C" fn c1() {}
     // @has - '//code' 'extern "system" fn system0()'
     pub extern "system" fn system0() {}
@@ -31,7 +31,7 @@ pub trait Bar {}
 
 // @has - '//code' 'impl Bar for fn()'
 impl Bar for fn() {}
-// @has - '//code' 'impl Bar for extern fn()'
+// @has - '//code' 'impl Bar for extern "C" fn()'
 impl Bar for extern fn() {}
 // @has - '//code' 'impl Bar for extern "system" fn()'
 impl Bar for extern "system" fn() {}
index 3997dcd81e153e75a20cb5a7fc2a5447e59ce788..8511d461703de58dfc9cf656656f0ec2b1839e53 100644 (file)
 
 extern crate rustdoc_ffi as lib;
 
-// @has ffi/fn.foreigner.html //pre 'pub unsafe extern fn foreigner(cold_as_ice: u32)'
+// @has ffi/fn.foreigner.html //pre 'pub unsafe extern "C" fn foreigner(cold_as_ice: u32)'
 pub use lib::foreigner;
 
 extern "C" {
-    // @has ffi/fn.another.html //pre 'pub unsafe extern fn another(cold_as_ice: u32)'
+    // @has ffi/fn.another.html //pre 'pub unsafe extern "C" fn another(cold_as_ice: u32)'
     pub fn another(cold_as_ice: u32);
 }
index 6f84428b0798ff36965118296d20a204c61fa4b3..75df53589454f6d3927e8d8fc382d9f3b4f7fc9d 100644 (file)
@@ -10,7 +10,7 @@
 
 extern {
     // @has issue_22038/fn.foo1.html \
-    //      '//*[@class="rust fn"]' 'pub unsafe extern fn foo1()'
+    //      '//*[@class="rust fn"]' 'pub unsafe extern "C" fn foo1()'
     pub fn foo1();
 }
 
@@ -21,7 +21,7 @@
 }
 
 // @has issue_22038/fn.bar.html \
-//      '//*[@class="rust fn"]' 'pub extern fn bar()'
+//      '//*[@class="rust fn"]' 'pub extern "C" fn bar()'
 pub extern fn bar() {}
 
 // @has issue_22038/fn.baz.html \
index 1b60c2a334fa5ea7ae15a4bd553a9cdd9bc791a6..6ba776ba4679f845c2dd9f3f880e586f7d056b34 100644 (file)
@@ -9,6 +9,6 @@
 // except according to those terms.
 
 extern "C" {
-    // @has variadic/fn.foo.html //pre 'pub unsafe extern fn foo(x: i32, ...)'
+    // @has variadic/fn.foo.html //pre 'pub unsafe extern "C" fn foo(x: i32, ...)'
     pub fn foo(x: i32, ...);
 }
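
The rustdoc expectations above change because a bare `extern fn` defaults to the "C" ABI, which rustdoc now spells out explicitly. A tiny sketch showing the two equivalent spellings:

// The two declarations below are equivalent; rustdoc now renders both
// with an explicit `extern "C"`.
pub extern fn implicit_c_abi() {}
pub extern "C" fn explicit_c_abi() {}

fn main() {
    implicit_c_abi();
    explicit_c_abi();
}
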
index adc229aaacc54d77955283f760e5509e737aaf53..78e0f7e619b12791e7f52b917eb205959da36f9f 100644 (file)
@@ -4,7 +4,9 @@ error: no method named `count` found for type `std::iter::Filter<std::iter::Fuse
 17 |     once::<&str>("str").fuse().filter(|a: &str| true).count();
    |                                                       ^^^^^
    |
-   = note: the method `count` exists but the following trait bounds were not satisfied: `[closure@$DIR/issue-36053-2.rs:17:39: 17:53] : std::ops::FnMut<(&_,)>`, `std::iter::Filter<std::iter::Fuse<std::iter::Once<&str>>, [closure@$DIR/issue-36053-2.rs:17:39: 17:53]> : std::iter::Iterator`
+   = note: the method `count` exists but the following trait bounds were not satisfied:
+           `[closure@$DIR/issue-36053-2.rs:17:39: 17:53] : std::ops::FnMut<(&_,)>`
+           `std::iter::Filter<std::iter::Fuse<std::iter::Once<&str>>, [closure@$DIR/issue-36053-2.rs:17:39: 17:53]> : std::iter::Iterator`
 
 error[E0281]: type mismatch: `[closure@$DIR/issue-36053-2.rs:17:39: 17:53]` implements the trait `for<'r> std::ops::FnMut<(&'r str,)>`, but the trait `for<'r> std::ops::FnMut<(&'r &str,)>` is required
   --> $DIR/issue-36053-2.rs:17:32
diff --git a/src/test/ui/mismatched_types/method-help-unsatisfied-bound.rs b/src/test/ui/mismatched_types/method-help-unsatisfied-bound.rs
new file mode 100644 (file)
index 0000000..a4eb445
--- /dev/null
@@ -0,0 +1,18 @@
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+struct Foo;
+
+fn main() {
+    let a: Result<(), Foo> = Ok(());
+    a.unwrap();
+    //~^ ERROR no method named `unwrap` found for type `std::result::Result<(), Foo>`
+    //~| NOTE the method `unwrap` exists but the following trait bounds were not satisfied
+}
diff --git a/src/test/ui/mismatched_types/method-help-unsatisfied-bound.stderr b/src/test/ui/mismatched_types/method-help-unsatisfied-bound.stderr
new file mode 100644 (file)
index 0000000..2bd786c
--- /dev/null
@@ -0,0 +1,11 @@
+error: no method named `unwrap` found for type `std::result::Result<(), Foo>` in the current scope
+  --> $DIR/method-help-unsatisfied-bound.rs:15:7
+   |
+15 |     a.unwrap();
+   |       ^^^^^^
+   |
+   = note: the method `unwrap` exists but the following trait bounds were not satisfied:
+           `Foo : std::fmt::Debug`
+
+error: aborting due to previous error
+
index 13d92c64d0153d95dbabeb49b828bbbef4b1bb34..397359840ecad02d5fe69b2a0cf328e98235ffea 160000 (submodule)
@@ -1 +1 @@
-Subproject commit 13d92c64d0153d95dbabeb49b828bbbef4b1bb34
+Subproject commit 397359840ecad02d5fe69b2a0cf328e98235ffea
index 3d9a4fba6cdeefc5be4d7e1e2dd427b4fe50f95a..1b55dc792c2e5d1ec77182504fc4454acb151ca2 100644 (file)
@@ -41,7 +41,7 @@ macro_rules! t {
 }
 
 fn main() {
-    let docs = env::args().nth(1).unwrap();
+    let docs = env::args_os().nth(1).unwrap();
     let docs = env::current_dir().unwrap().join(docs);
     let mut errors = false;
     walk(&mut HashMap::new(), &docs, &docs, &mut errors);
@@ -65,7 +65,6 @@ enum Redirect {
 struct FileEntry {
     source: String,
     ids: HashSet<String>,
-    names: HashSet<String>,
 }
 
 type Cache = HashMap<PathBuf, FileEntry>;
@@ -73,7 +72,7 @@ struct FileEntry {
 impl FileEntry {
     fn parse_ids(&mut self, file: &Path, contents: &str, errors: &mut bool) {
         if self.ids.is_empty() {
-            with_attrs_in_source(contents, " id", |fragment, i| {
+            with_attrs_in_source(contents, " id", |fragment, i, _| {
                 let frag = fragment.trim_left_matches("#").to_owned();
                 if !self.ids.insert(frag) {
                     *errors = true;
@@ -82,15 +81,6 @@ fn parse_ids(&mut self, file: &Path, contents: &str, errors: &mut bool) {
             });
         }
     }
-
-    fn parse_names(&mut self, contents: &str) {
-        if self.names.is_empty() {
-            with_attrs_in_source(contents, " name", |fragment, _| {
-                let frag = fragment.trim_left_matches("#").to_owned();
-                self.names.insert(frag);
-            });
-        }
-    }
 }
 
 fn walk(cache: &mut Cache, root: &Path, dir: &Path, errors: &mut bool) {
@@ -116,15 +106,8 @@ fn check(cache: &mut Cache,
          file: &Path,
          errors: &mut bool)
          -> Option<PathBuf> {
-    // ignore js files as they are not prone to errors as the rest of the
-    // documentation is and they otherwise bring up false positives.
-    if file.extension().and_then(|s| s.to_str()) == Some("js") {
-        return None;
-    }
-
-    // ignore handlebars files as they use {{}} to build links, we only
-    // want to test the generated files
-    if file.extension().and_then(|s| s.to_str()) == Some("hbs") {
+    // Ignore non-HTML files.
+    if file.extension().and_then(|s| s.to_str()) != Some("html") {
         return None;
     }
 
@@ -147,13 +130,7 @@ fn check(cache: &mut Cache,
         return None;
     }
 
-    // mdbook uses the HTML <base> tag to handle links for subdirectories, which
-    // linkchecker doesn't support
-    if file.to_str().unwrap().contains("unstable-book") {
-        return None;
-    }
-
-    let res = load_file(cache, root, PathBuf::from(file), SkipRedirect);
+    let res = load_file(cache, root, file, SkipRedirect);
     let (pretty_file, contents) = match res {
         Ok(res) => res,
         Err(_) => return None,
@@ -162,13 +139,10 @@ fn check(cache: &mut Cache,
         cache.get_mut(&pretty_file)
              .unwrap()
              .parse_ids(&pretty_file, &contents, errors);
-        cache.get_mut(&pretty_file)
-             .unwrap()
-             .parse_names(&contents);
     }
 
     // Search for anything that's the regex 'href[ ]*=[ ]*".*?"'
-    with_attrs_in_source(&contents, " href", |url, i| {
+    with_attrs_in_source(&contents, " href", |url, i, base| {
         // Ignore external URLs
         if url.starts_with("http:") || url.starts_with("https:") ||
            url.starts_with("javascript:") || url.starts_with("ftp:") ||
@@ -184,9 +158,9 @@ fn check(cache: &mut Cache,
         // Once we've plucked out the URL, parse it using our base url and
         // then try to extract a file path.
         let mut path = file.to_path_buf();
-        if !url.is_empty() {
+        if !base.is_empty() || !url.is_empty() {
             path.pop();
-            for part in Path::new(url).components() {
+            for part in Path::new(base).join(url).components() {
                 match part {
                     Component::Prefix(_) |
                     Component::RootDir => panic!(),
@@ -197,13 +171,6 @@ fn check(cache: &mut Cache,
             }
         }
 
-        if let Some(extension) = path.extension() {
-            // don't check these files
-            if extension == "png" {
-                return;
-            }
-        }
-
         // Alright, if we've found a file name then this file had better
         // exist! If it doesn't then we register and print an error.
         if path.exists() {
@@ -218,11 +185,17 @@ fn check(cache: &mut Cache,
                          pretty_path.display());
                 return;
             }
-            let res = load_file(cache, root, path.clone(), FromRedirect(false));
+            if let Some(extension) = path.extension() {
+                // Ignore non-HTML files.
+                if extension != "html" {
+                    return;
+                }
+            }
+            let res = load_file(cache, root, &path, FromRedirect(false));
             let (pretty_path, contents) = match res {
                 Ok(res) => res,
                 Err(LoadError::IOError(err)) => {
-                    panic!(format!("error loading {}: {}", path.display(), err));
+                    panic!("error loading {}: {}", path.display(), err);
                 }
                 Err(LoadError::BrokenRedirect(target, _)) => {
                     *errors = true;
@@ -245,11 +218,10 @@ fn check(cache: &mut Cache,
 
                 let entry = &mut cache.get_mut(&pretty_path).unwrap();
                 entry.parse_ids(&pretty_path, &contents, errors);
-                entry.parse_names(&contents);
 
-                if !(entry.ids.contains(*fragment) || entry.names.contains(*fragment)) {
+                if !entry.ids.contains(*fragment) {
                     *errors = true;
-                    print!("{}:{}: broken link fragment  ",
+                    print!("{}:{}: broken link fragment ",
                            pretty_file.display(),
                            i + 1);
                     println!("`#{}` pointing to `{}`", fragment, pretty_path.display());
@@ -267,7 +239,7 @@ fn check(cache: &mut Cache,
 
 fn load_file(cache: &mut Cache,
              root: &Path,
-             mut file: PathBuf,
+             file: &Path,
              redirect: Redirect)
              -> Result<(PathBuf, String), LoadError> {
     let mut contents = String::new();
@@ -279,9 +251,9 @@ fn load_file(cache: &mut Cache,
             None
         }
         Entry::Vacant(entry) => {
-            let mut fp = File::open(file.clone()).map_err(|err| {
+            let mut fp = File::open(file).map_err(|err| {
                 if let FromRedirect(true) = redirect {
-                    LoadError::BrokenRedirect(file.clone(), err)
+                    LoadError::BrokenRedirect(file.to_path_buf(), err)
                 } else {
                     LoadError::IOError(err)
                 }
@@ -297,17 +269,14 @@ fn load_file(cache: &mut Cache,
                 entry.insert(FileEntry {
                     source: contents.clone(),
                     ids: HashSet::new(),
-                    names: HashSet::new(),
                 });
             }
             maybe
         }
     };
-    file.pop();
-    match maybe_redirect.map(|url| file.join(url)) {
+    match maybe_redirect.map(|url| file.parent().unwrap().join(url)) {
         Some(redirect_file) => {
-            let path = PathBuf::from(redirect_file);
-            load_file(cache, root, path, FromRedirect(true))
+            load_file(cache, root, &redirect_file, FromRedirect(true))
         }
         None => Ok((pretty_file, contents)),
     }
@@ -329,10 +298,14 @@ fn maybe_redirect(source: &str) -> Option<String> {
     })
 }
 
-fn with_attrs_in_source<F: FnMut(&str, usize)>(contents: &str, attr: &str, mut f: F) {
+fn with_attrs_in_source<F: FnMut(&str, usize, &str)>(contents: &str, attr: &str, mut f: F) {
+    let mut base = "";
     for (i, mut line) in contents.lines().enumerate() {
         while let Some(j) = line.find(attr) {
             let rest = &line[j + attr.len()..];
+            // The base tag should always be the first link in the document so
+            // we can get away with using one pass.
+            let is_base = line[..j].ends_with("<base");
             line = rest;
             let pos_equals = match rest.find("=") {
                 Some(i) => i,
@@ -358,7 +331,11 @@ fn with_attrs_in_source<F: FnMut(&str, usize)>(contents: &str, attr: &str, mut f
                 Some(i) => &rest[..i],
                 None => continue,
             };
-            f(url, i)
+            if is_base {
+                base = url;
+                continue;
+            }
+            f(url, i, base)
         }
     }
 }
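
The callback now receives the document's `<base href>` so relative links are resolved against it before being checked. A minimal sketch of that resolution step, with illustrative paths rather than the real linkchecker state:

use std::path::{Component, Path, PathBuf};

fn resolve(file: &Path, base: &str, url: &str) -> PathBuf {
    let mut path = file.to_path_buf();
    if !base.is_empty() || !url.is_empty() {
        // Drop the file name, then walk the components of the base-joined URL.
        path.pop();
        for part in Path::new(base).join(url).components() {
            match part {
                Component::ParentDir => {
                    path.pop();
                }
                Component::Normal(seg) => path.push(seg),
                _ => {}
            }
        }
    }
    path
}

fn main() {
    let file = Path::new("doc/book/ch01.html");
    // With `<base href="../">` a link to "intro.html" resolves one level up.
    assert_eq!(resolve(file, "../", "intro.html"), PathBuf::from("doc/intro.html"));
}
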
diff --git a/src/tools/rust-installer b/src/tools/rust-installer
new file mode 160000 (submodule)
index 0000000..daa2a05
--- /dev/null
@@ -0,0 +1 @@
+Subproject commit daa2a05ebe7b8d07a309e8891ebc548652362954
index 8b7da2267cdedc191fb7e3f9e0e1f64c3368475f..ba02ee0c6d73b7a39cfa9caf95695cec951b10f2 100644 (file)
@@ -32,6 +32,7 @@
     "openssl", // BSD+advertising clause, cargo, mdbook
     "pest", // MPL2, mdbook via handlebars
     "thread-id", // Apache-2.0, mdbook
+    "strings", // this is actually MIT/Apache-2.0 but it's not in the manifest yet
 ];
 
 pub fn check(path: &Path, bad: &mut bool) {
index f14a6a03893b39cd1313b789febc938c42d83252..6b666fa809f7a8cbfeb1d099ff25289eaed5834f 100644 (file)
@@ -81,12 +81,12 @@ fn filter_dirs(path: &Path) -> bool {
         "src/libbacktrace",
         "src/compiler-rt",
         "src/rustllvm",
-        "src/rust-installer",
         "src/liblibc",
         "src/vendor",
         "src/rt/hoedown",
         "src/tools/cargo",
         "src/tools/rls",
+        "src/tools/rust-installer",
     ];
     skip.iter().any(|p| path.ends_with(p))
 }